static class WordCountReducer extends Reducer<Text, LongWritable,Text,LongWritable> {
@Override
protected void reduce(Text key, Iterable<LongWritable> values, Context context)
        throws IOException, InterruptedException {
    // Aggregate the per-mapper partial counts emitted for this word.
    long total = 0L;
    for (LongWritable partial : values) {
        total += partial.get();
    }
    // Emit one (word, totalCount) record per distinct key.
    context.write(key, new LongWritable(total));
}
static class WordCountMapper extends Mapper<LongWritable, Text,Text,LongWritable> {
@Override
protected void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
    // Tokenize the input line and emit (word, 1) for each word.
    // Fix: split on runs of whitespace ("\\s+") instead of a single space —
    // with split(" "), consecutive spaces or tabs yield empty tokens and the
    // empty string "" would be counted as a word. Also skip any blank token
    // produced by a leading delimiter.
    String[] words = value.toString().split("\\s+");
    for (String word : words) {
        if (!word.isEmpty()) {
            context.write(new Text(word), new LongWritable(1));
        }
    }
}
// Driver entry point: configures and wires up the word-count MapReduce job.
// NOTE(review): the method body continues past this chunk — job submission
// (e.g. waitForCompletion) is not visible here; confirm it follows.
public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
// Hadoop configuration (picks up *-site.xml resources from the classpath).
Configuration conf = new Configuration();
// Create the job instance from the configuration.
Job instance = Job.getInstance(conf);
// Tell Hadoop which jar to ship to the cluster, located via this driver class.
instance.setJarByClass(WordCountDriver.class);
// Wire up the mapper and reducer implementations.
instance.setMapperClass(WordCountMapper.class);
instance.setReducerClass(WordCountReducer.class);
// Declare the (key, value) types the job's final output produces.
instance.setOutputKeyClass(Text.class);
instance.setOutputValueClass(LongWritable.class);
// Input file and output directory (the output directory must not already exist,
// or the job will fail at submission).
// NOTE(review): paths are hard-coded to a local Windows layout — consider
// taking them from args for portability.
FileInputFormat.setInputPaths(instance,new Path("D:\\Program Files\\hadoop-3.2.1\\LICENSE.txt"));
FileOutputFormat.setOutputPath(instance,new Path("D:\\out"));