• Cloud Computing: MapReduce


    Detailed explanation and examples of the Hadoop sample program WordCount: http://blog.csdn.net/xw13106209/article/details/6116323

    MapReduce programming examples in Hadoop (repost): http://eric-gcm.iteye.com/blog/1807468

    [Advanced MapReduce Programming II] Summing odd and even lines separately: http://www.aboutyun.com/forum.php?mod=viewthread&tid=9360

    Hadoop 2.2.0 MapReduce: summing and sorting: http://www.cnblogs.com/mengyao/p/4151509.html

    Finding the maximum with MapReduce: http://blog.csdn.net/lzm1340458776/article/details/43227759

    Take your time working through these.

    Finding the maximum: each mapper forwards every number under a single shared key, and a single reducer then scans all the values for the global maximum.

    package org.apache.hadoop.examples;

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.hadoop.util.GenericOptionsParser;

    public class Score2 {

        // All mappers emit under this one shared key, so a single reduce
        // call sees every candidate value. (A static "max" variable shared
        // between mapper and reducer only appears to work in local mode;
        // in a cluster they run in separate JVMs.)
        private static final Text MAX_KEY = new Text("max");

        public static class ScoreMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
            @Override
            public void map(LongWritable key, Text value, Context context)
                    throws IOException, InterruptedException {
                // Each input line holds one integer; forward it as a candidate maximum.
                String line = value.toString().trim();
                if (line.isEmpty()) {
                    return; // skip blank lines
                }
                context.write(MAX_KEY, new IntWritable(Integer.parseInt(line)));
            }
        }

        public static class MaxReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
            @Override
            public void reduce(Text key, Iterable<IntWritable> values, Context context)
                    throws IOException, InterruptedException {
                int max = Integer.MIN_VALUE;
                for (IntWritable v : values) {
                    max = Math.max(max, v.get());
                }
                // Emit under the incoming key so this class can also run as a combiner.
                context.write(key, new IntWritable(max));
            }
        }

        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
            if (otherArgs.length < 2) {
                System.err.println("Usage: Score2 <in> <out>");
                System.exit(2);
            }

            Job job = Job.getInstance(conf, "max");
            job.setJarByClass(Score2.class);
            job.setMapperClass(ScoreMapper.class);
            job.setReducerClass(MaxReducer.class);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(IntWritable.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);
            job.setNumReduceTasks(1);

            FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
            FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));

            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }
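
    Since the maximum of local maxima is the global maximum, this job can safely pre-aggregate on the map side. A minimal, optional addition to the driver above (one extra line in main(), not part of the original code): registering MaxReducer as a combiner makes each map task ship only its local maximum across the network instead of every input number.

            // Optional, in main(): reuse MaxReducer as a combiner so each
            // map task emits only its local maximum before the shuffle.
            job.setCombinerClass(MaxReducer.class);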
     
    The second example counts, for each input line, the words that end in the letter 'h', and then sums those counts over the whole input:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.hadoop.util.GenericOptionsParser;

    public class max {

        public static class Map1 extends Mapper<Object, Text, Text, IntWritable> {

            @Override
            public void map(Object key, Text value, Context context)
                    throws IOException, InterruptedException {
                // Count the words on this line that end with the letter 'h'.
                // endsWith() is safe for the empty tokens that split() can
                // produce, unlike indexing into the last character.
                int count = 0;
                for (String word : value.toString().split(" ")) {
                    if (word.endsWith("h")) {
                        count++;
                    }
                }
                // A single constant key routes every per-line count to one reduce call.
                context.write(new Text("1"), new IntWritable(count));
            }
        }

        public static class Reduce1 extends Reducer<Text, IntWritable, Text, IntWritable> {

            @Override
            public void reduce(Text key, Iterable<IntWritable> values, Context context)
                    throws IOException, InterruptedException {
                // Add up the per-line counts to get the total for the whole input.
                int sum = 0;
                for (IntWritable v : values) {
                    sum += v.get();
                }
                context.write(new Text(" "), new IntWritable(sum));
            }
        }

        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
            if (otherArgs.length != 2) {
                System.err.println("Usage: max <in> <out>");
                System.exit(2);
            }

            Job job = Job.getInstance(conf, "suffix count");
            job.setJarByClass(max.class);

            job.setMapperClass(Map1.class);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(IntWritable.class);

            job.setReducerClass(Reduce1.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);

            FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
            Path outpath = new Path(otherArgs[1]);
            // Delete any previous output directory; the job fails if it already exists.
            if (fs.exists(outpath)) {
                fs.delete(outpath, true);
            }
            FileOutputFormat.setOutputPath(job, outpath);

            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }
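
    To sanity-check the counting rule without a Hadoop cluster, the mapper's inner loop can be exercised as plain Java. A minimal sketch; the SuffixCountDemo class and countTrailingH helper below are hypothetical, introduced only for illustration:

    public class SuffixCountDemo {

        // Mirrors Map1's per-line logic: count the space-separated
        // words that end with the letter 'h'.
        static int countTrailingH(String line) {
            int count = 0;
            for (String word : line.split(" ")) {
                if (word.endsWith("h")) {
                    count++;
                }
            }
            return count;
        }

        public static void main(String[] args) {
            // "fish" and "with" end in 'h', so this prints 2.
            System.out.println(countTrailingH("one fish swims with two cats"));
        }
    }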

  • Original post: https://www.cnblogs.com/ximiaomiao/p/7003759.html