• 强-大数据第八讲


    基于Hadoop的WordCount源码示例:

    一、WordCountMain.java

    package demo;

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class WordCountMain {

        /**
         * Driver for the WordCount MapReduce job: wires the mapper and reducer
         * together, sets the key/value types, and submits the job to the cluster.
         *
         * <p>Usage: {@code WordCountMain <input path> <output path>}
         *
         * @param args args[0] = HDFS input path, args[1] = HDFS output path
         *             (the output directory must not already exist)
         * @throws Exception if job configuration or submission fails
         */
        public static void main(String[] args) throws Exception {
            // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
            if (args.length < 2) {
                System.err.println("Usage: WordCountMain <input path> <output path>");
                System.exit(2);
            }

            // A job = map phase + reduce phase.
            Configuration conf = new Configuration();
            Job job = Job.getInstance(conf, "word count");

            // Entry point: lets Hadoop locate the jar containing this class.
            job.setJarByClass(WordCountMain.class);

            // Mapper and its output key/value types.
            job.setMapperClass(WordCountMapper.class);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(LongWritable.class);

            // Reducer and the final output key/value types.
            job.setReducerClass(WordCountReducer.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(LongWritable.class);

            // Job input and output locations.
            FileInputFormat.setInputPaths(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1]));

            // Submit and block until completion; propagate success/failure as the
            // process exit code (the original discarded the boolean, so a failed
            // job still exited 0).
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }

    }

    二、WordCountMapper.java

    package demo;

    import java.io.IOException;

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;

    public class WordCountMapper extends Mapper<LongWritable, Text, Text, LongWritable> {

        // Reused across map() calls — the standard Hadoop idiom to avoid
        // allocating a fresh writable per token.
        private final Text word = new Text();
        private static final LongWritable ONE = new LongWritable(1);

        /**
         * Tokenizes one input line and emits (word, 1) for each word.
         *
         * @param key     byte offset of the line within the input split (unused)
         * @param value   one line of input text, e.g. "I love Beijing"
         * @param context map-side context used to emit intermediate pairs
         */
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Split on runs of whitespace. The original split(" ") produced
            // empty-string tokens for consecutive spaces, which were then
            // counted as words; skip empties defensively (a leading space
            // still yields one empty first element).
            String[] tokens = value.toString().split("\\s+");
            for (String token : tokens) {
                if (!token.isEmpty()) {
                    word.set(token);
                    context.write(word, ONE);
                }
            }
        }

    }

    三、WordCountReducer.java

    package demo;

    import java.io.IOException;

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Reducer;

    public class WordCountReducer extends Reducer<Text, LongWritable, Text, LongWritable> {

        /**
         * Sums all partial counts for a single word and emits the total.
         *
         * @param word    the word (reduce-side key, i.e. k3)
         * @param counts  all counts emitted by the mappers for this word
         *                (each element is a v2 from the map phase)
         * @param context reduce-side context used to emit the final pair
         */
        @Override
        protected void reduce(Text word, Iterable<LongWritable> counts, Context context)
                throws IOException, InterruptedException {
            long sum = 0;
            for (LongWritable count : counts) {
                sum += count.get();
            }

            // Emit (word, total occurrences).
            context.write(word, new LongWritable(sum));
        }

    }

     

  • 相关阅读:
    14.3 Go iris
    14.2 Go性能优化
    14.1 Go数据结构
    13.3 Go章节练习题
    13.2 Go练习题答案
    13.1 Go练习题
    12.1 Go nsq
    11.3 Go 开发博客
    11.2 Go gin
    11.1 Go Http
  • 原文地址:https://www.cnblogs.com/Zac1010/p/11175145.html
Copyright © 2020-2023  润新知