• WordCount 远程集群源码


    package test;
    import java.io.IOException;
    import java.util.StringTokenizer;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.hadoop.util.GenericOptionsParser;
     
    public class WordCount {
     
      public static class TokenizerMapper
           extends Mapper<Object, Text, Text, IntWritable>{
         
        private final static IntWritable one = new IntWritable(1);
        private Text word = new Text();
           
        public void map(Object key, Text value, Context context
                        ) throws IOException, InterruptedException {
          StringTokenizer itr = new StringTokenizer(value.toString());
          while (itr.hasMoreTokens()) {
            word.set(itr.nextToken());
            context.write(word, one);      }
        }
      }
       
      public static class IntSumReducer extends Reducer<Text,IntWritable,Text,IntWritable> {
        private IntWritable result = new IntWritable();
        
        
            public void reduce(Text key, Iterable<IntWritable> values,
                           Context context
                           ) throws IOException, InterruptedException {
          int sum = 0;
          for (IntWritable val : values) {
            sum += val.get();
          }
          result.set(sum);
          context.write(key, result);
        }
        
        
      }
     
      /**
       * Configures and submits the WordCount job against the remote cluster.
       * Usage: wordcount &lt;in&gt; &lt;out&gt; — falls back to the demo paths
       * "input"/"out" when no arguments are supplied.
       */
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Point the client at the remote HDFS NameNode; bare paths such as
        // "input" are then resolved under hdfs://192.168.2.35:9000/.
        conf.set("fs.defaultFS", "hdfs://192.168.2.35:9000/");
        // Honor real command-line arguments when given; otherwise keep the
        // original demo behavior of reading "input" and writing "out".
        String[] ars = (args != null && args.length > 0)
            ? args
            : new String[]{"input", "out"};
        String[] otherArgs = new GenericOptionsParser(conf, ars).getRemainingArgs();
        if (otherArgs.length != 2) {
          System.err.println("Usage: wordcount <in> <out>");
          System.exit(2);
        }
        // Job.getInstance(...) replaces the deprecated new Job(conf, name) ctor.
        Job job = Job.getInstance(conf, "wordcount");
        job.setJarByClass(WordCount.class);
        job.setMapperClass(TokenizerMapper.class);
        // The reducer doubles as a combiner (sum is associative/commutative),
        // cutting shuffle traffic.
        job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
        // Block until the job finishes; exit code reflects success/failure.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
      }
    }
  • 相关阅读:
    打怪(CDQ分治+斜率优化)
    中缀表达式求值
    马拉车(manacher)算法
    后缀数组的求法及应用
    【最小生成树】藏宝图(prim)
    【最小生成树】prim算法
    [最近公共祖先]最近公共祖先(LCA)
    [思维]Supreme Number
    [模拟] Lattice's basics in digital electronics(2018沈阳赛区网络预赛)
    【搜索+思维】Distinctive Character
  • 原文地址:https://www.cnblogs.com/canyangfeixue/p/4599925.html
Copyright © 2020-2023  润新知