public static class WordCountReducer extends MapReduceBase
        implements Reducer<Text, IntWritable, Text, IntWritable> {
    private IntWritable result = new IntWritable();

    public void reduce(Text key, Iterator<IntWritable> values,
                       OutputCollector<Text, IntWritable> output, Reporter reporter)
            throws IOException {
        // Sum the per-word counts emitted by the mappers (and the combiner).
        int sum = 0;
        while (values.hasNext()) {
            sum += values.next().get();
        }
        result.set(sum);
        output.collect(key, result);
    }
}
public static void main(String[] args) throws Exception {
    String input = "hdfs://192.168.2.100:9000/user/licz/hdfs/o_t_account";
    String output = "hdfs://192.168.2.100:9000/user/licz/hdfs/o_t_account/result";

    JobConf conf = new JobConf(WordCount.class);
    conf.setJobName("WordCount");
    // Load the cluster configuration files from the classpath.
    conf.addResource("classpath:/hadoop/core-site.xml");
    conf.addResource("classpath:/hadoop/hdfs-site.xml");
    conf.addResource("classpath:/hadoop/mapred-site.xml");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    conf.setMapperClass(WordCountMapper.class);
    // The reducer doubles as the combiner, since summing counts is associative.
    conf.setCombinerClass(WordCountReducer.class);
    conf.setReducerClass(WordCountReducer.class);

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    // Note: the output directory must not already exist when the job runs,
    // or Hadoop will refuse to start the job.
    FileInputFormat.setInputPaths(conf, new Path(input));
    FileOutputFormat.setOutputPath(conf, new Path(output));

    JobClient.runJob(conf);
    System.exit(0);
}
}
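The driver above registers a WordCountMapper that this excerpt does not show. For reference, a minimal sketch against the same old org.apache.hadoop.mapred API could look like the following; the article's actual implementation may differ, and the usual imports (java.util.StringTokenizer, org.apache.hadoop.io.LongWritable, etc.) are assumed:

public static class WordCountMapper extends MapReduceBase
        implements Mapper<LongWritable, Text, Text, IntWritable> {
    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();

    public void map(LongWritable key, Text value,
                    OutputCollector<Text, IntWritable> output, Reporter reporter)
            throws IOException {
        // Split each input line into whitespace-separated tokens
        // and emit a (token, 1) pair for each one.
        StringTokenizer tokenizer = new StringTokenizer(value.toString());
        while (tokenizer.hasMoreTokens()) {
            word.set(tokenizer.nextToken());
            output.collect(word, one);
        }
    }
}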
Note:
When developing in Eclipse on Windows, the Windows user name must match the user name under which Hadoop was installed on the server; otherwise the job fails with an error about lacking permission to create directories.
For example, since Hadoop is installed under the licz user on the Linux server, I have to run Eclipse under a Windows user named licz as well.
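As a side note not from the original article: on Hadoop 2.x and later with simple authentication, the client user can instead be overridden explicitly, which avoids having to rename the Windows account. A sketch under that assumption:

// Workaround sketch (assumes Hadoop 2.x+ with simple authentication; not
// mentioned in the original article). UserGroupInformation also reads the
// HADOOP_USER_NAME system property, so setting it before the first Hadoop
// client call makes the job run as "licz" regardless of the local Windows
// user name. Put this at the top of main():
System.setProperty("HADOOP_USER_NAME", "licz");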
With that in place, we can develop MapReduce programs in Eclipse.