Server Mode: Submitting on the Server
First, the Hadoop configuration files from the server need to be placed under src.
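In practice this means copying the site files from the cluster's $HADOOP_HOME/etc/hadoop directory; the exact set depends on the cluster, but typically it looks like this:
src/
    core-site.xml
    hdfs-site.xml
    mapred-site.xml
    yarn-site.xml   (if the cluster runs on YARN)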
* Server mode: submit on the server
* a. Package the MR program into a jar and copy it to the server
* b. Run it with: hadoop jar <jar path> <fully qualified class name>
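For example, assuming the jar was copied to /home/chb/wc.jar (a hypothetical path):
hadoop jar /home/chb/wc.jar com.chb.WC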
Mapper class:
package com.chb;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.util.StringUtils;
/**
 * Word-count mapper: splits each input line into words
 * and emits a (word, 1) pair for every word.
 */
public class WCMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    /**
     * Called once for each line read from the file split:
     * the byte offset of the line is the key, the line content is the value.
     *
     * The mapper output goes through shuffle and grouping before it reaches
     * the reducer; both use the defaults in this example, so no extra
     * configuration is needed.
     */
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String[] words = StringUtils.split(value.toString(), ' ');
        for (String w : words) {
            context.write(new Text(w), new IntWritable(1));
        }
    }
}
Reducer class:
package com.chb;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
public class WCReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    /**
     * The mapper output, after shuffle and grouping, arrives at the reducer.
     * This method is called once per group: it sums all counts for one word
     * and writes (word, total).
     */
    @Override
    protected void reduce(Text key, Iterable<IntWritable> vals, Context context)
            throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable iw : vals) {
            sum += iw.get();
        }
        context.write(key, new IntWritable(sum));
    }
}
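To make the shuffle and grouping step concrete, here is a hypothetical trace for a single input line (the sample text is made up):
Input line (offset 0): "hello world hello"
map emits:             ("hello", 1), ("world", 1), ("hello", 1)
after shuffle/group:   ("hello", [1, 1]), ("world", [1])
reduce writes:         ("hello", 2), ("world", 1)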
Driver class:
package com.chb;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
public class WC {
    public static void main(String[] args) {
        // Load the configuration (reads the *-site.xml files on the classpath)
        Configuration conf = new Configuration();
        try {
            FileSystem fs = FileSystem.get(conf);
            // Create the job; pass conf so the job picks up the loaded configuration
            Job job = Job.getInstance(conf);
            job.setJarByClass(WC.class);
            job.setJobName("WC");
            // Mapper and Reducer classes
            job.setMapperClass(WCMapper.class);
            job.setReducerClass(WCReducer.class);
            // Mapper output types
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(IntWritable.class);
            // Input and output paths; the output directory is deleted first,
            // because the job fails if it already exists
            FileInputFormat.addInputPath(job, new Path("/user/chb/input/"));
            Path out = new Path("/user/chb/output/wc");
            if (fs.exists(out)) {
                fs.delete(out, true);
            }
            FileOutputFormat.setOutputPath(job, out);
            boolean f = job.waitForCompletion(true);
            if (f) {
                System.out.println("Job completed.");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
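Once waitForCompletion returns true, the result can be inspected from the command line; part-r-00000 is the default output file name of a single-reducer job:
hadoop fs -ls /user/chb/output/wc
hadoop fs -cat /user/chb/output/wc/part-r-00000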
With the connection configured, uploading files to HDFS through Eclipse fails with a permission error. Solution: still unresolved.
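A workaround that is often used for this kind of error (an assumption here, not something verified in this post) is to make the client act as the HDFS user that owns the target directory, before the FileSystem is created:
// Hypothetical workaround, not verified in this post: impersonate the
// server-side user "chb" so the HDFS client passes the permission check.
// This only applies when the cluster runs without Kerberos security.
System.setProperty("HADOOP_USER_NAME", "chb");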
========================
Upload the local file to HDFS from the command line: hadoop fs -put wc.txt /user/chb/input/ (the path must match the input path used in the driver)
Export the source as a jar package:
Note: when submitting, the class must be given with its package name, i.e. the fully qualified name.
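For example, with the package com.chb used above and an exported jar named wc.jar (the jar name is illustrative):
hadoop jar wc.jar com.chb.WC
Omitting the package (hadoop jar wc.jar WC) fails with ClassNotFoundException.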