• 周总结四


    周总结四

    核心代码

    package com.mapr;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import java.io.IOException;
    
    public class WordCountDriver {
        /**
         * Entry point that configures and submits the word-count MapReduce job.
         *
         * <p>Usage: {@code WordCountDriver [inputPath] [outputPath]}. When no
         * arguments are given, the original hard-coded HDFS paths are used, so
         * existing invocations keep working.
         *
         * @param args optional: args[0] = input path, args[1] = output path
         * @throws IOException            if job submission fails
         * @throws ClassNotFoundException if a job class cannot be resolved
         * @throws InterruptedException   if the wait for completion is interrupted
         */
        public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
            // 1. Obtain the configuration and create the job wrapper.
            Configuration configuration = new Configuration();
            Job job = Job.getInstance(configuration);
            // 2. Set the jar by class so the framework can locate the job jar.
            job.setJarByClass(WordCountDriver.class);
            // 3. Wire up the mapper and reducer implementations.
            job.setMapperClass(WordCountMapper.class);
            job.setReducerClass(WordCountReducer.class);
            // 4. Declare the map output key/value types (differ from job output only
            //    when a combiner/reducer changes types; here they happen to match).
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(IntWritable.class);
            // 5. Declare the final (reduce) output key/value types.
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);
            // 6. Input/output paths: prefer command-line arguments, fall back to
            //    the legacy hard-coded cluster paths for backward compatibility.
            String inputPath = args.length > 0 ? args[0]
                    : "hdfs://192.168.132.128:9000/mymapreduce1/in/MapReduceTry.txt";
            String outputPath = args.length > 1 ? args[1]
                    : "hdfs://192.168.132.128:9000/mymapreduce1/out";
            FileInputFormat.setInputPaths(job, new Path(inputPath));
            FileOutputFormat.setOutputPath(job, new Path(outputPath));
            // 7. Submit and block until done; propagate success/failure in the
            //    process exit code instead of silently discarding the result.
            boolean succeeded = job.waitForCompletion(true);
            System.exit(succeeded ? 0 : 1);
        }
    }
    
    package com.mapr;
    
    import org.apache.hadoop.io.*;
    import org.apache.hadoop.mapreduce.Mapper;
    import java.io.IOException;
    
    public class WordCountMapper extends Mapper<LongWritable,Text,Text,IntWritable>{
        Text k=new Text();
        IntWritable v=new IntWritable(1);
        protected void map(LongWritable key,Text value,Context context)throws IOException,InterruptedException{
            String line=value.toString();
            String[] words=line.split(" ");
            for(String word:words){
                k.set(word);
                context.write(k,v);
            }
        }
    }
    package com.mapr;
    
    import org.apache.hadoop.io.*;
    import org.apache.hadoop.mapreduce.Reducer;
    import java.io.IOException;
    
    public class WordCountReducer extends Reducer<Text,IntWritable,Text,IntWritable>{
        int sum;
        IntWritable v=new IntWritable();
        protected void reduce(Text key, Iterable<IntWritable> values,Context context)throws IOException,InterruptedException{
            sum=0;
            for(IntWritable count:values){
                sum+=count.get();
            }
            v.set(sum);
            context.write(key,v);
        }
    }
    

    本周使用 MapReduce 实现了单词统计(WordCount)示例。
    平均每天学习时间:2小时
    代码约 700 行

  • 相关阅读:
    MyBatis学习 之 三、动态SQL语句
    MyBatis学习 之 三、动态SQL语句
    MyBatis学习 之 二、SQL语句映射文件(2)增删改查、参数、缓存
    MyBatis学习 之 二、SQL语句映射文件(2)增删改查、参数、缓存
    Spring3 MVC使用@ResponseBody的乱码问题及解决办法
    xgqfrms™, xgqfrms® : xgqfrms's official website of GitHub!
    xgqfrms™, xgqfrms® : xgqfrms's official website of GitHub!
    xgqfrms™, xgqfrms® : xgqfrms's official website of GitHub!
    xgqfrms™, xgqfrms® : xgqfrms's official website of GitHub!
    xgqfrms™, xgqfrms® : xgqfrms's official website of GitHub!
  • 原文地址:https://www.cnblogs.com/2506236179zhw/p/14226688.html
Copyright © 2020-2023  润新知