• 【HBase Learning Part 5】HBase MapReduce


    Environment
      Virtual machine: VMware 10
      Linux version: CentOS-6.5-x86_64
      Client: Xshell 4
      FTP: Xftp 4
      JDK 8
      hadoop-2.6.5
      hbase-0.98.12.1-hadoop2

    package wc;
    
    import java.io.IOException;
    
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;
    
    // Mapper: reads plain text lines from HDFS and emits <word, 1> pairs.
    public class WCMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Split the line on spaces and emit each word with a count of 1.
            String[] strs = value.toString().split(" ");
            for (String string : strs) {
                context.write(new Text(string), new IntWritable(1));
            }
        }
    }
    
    package wc;
    
    import java.io.IOException;
    
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
    import org.apache.hadoop.hbase.mapreduce.TableReducer;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    
    // Reducer: sums the counts for each word and writes the result to HBase.
    // TableReducer fixes the output value type to a mutation (here a Put); the
    // third type parameter is only the output key type.
    public class WCReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {
    
        @Override
        protected void reduce(Text text, Iterable<IntWritable> iterable, Context context)
                throws IOException, InterruptedException {
    
            int sum = 0;
            for (IntWritable it : iterable) {
                sum += it.get();
            }
            // Write the MapReduce result to HBase: the word is the row key and the
            // count is stored as a string in column family "cf", qualifier "ct".
            Put put = new Put(text.toString().getBytes());
            put.add("cf".getBytes(), "ct".getBytes(), (sum + "").getBytes());
            // The row key travels with the Put, so the output key may be null.
            context.write(null, put);
    
        }
    }
    
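    The reducer stores one row per word in the HBase table "wc": the word itself is the row key and the count is kept as a string in the cell cf:ct. As a quick way to check that layout, here is a minimal read-back sketch (not part of the original post) using the HBase 0.98 client API; the class name WCGetExample and the row key "hello" are placeholders.
    
    package wc;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;
    
    public class WCGetExample {
    
        public static void main(String[] args) throws Exception {
            Configuration conf = HBaseConfiguration.create();
            conf.set("hbase.zookeeper.quorum", "node1,node2,node3");
            // HTable is the table client in the 0.98-era API; "hello" is a placeholder row key
            HTable table = new HTable(conf, "wc");
            Get get = new Get(Bytes.toBytes("hello"));
            Result result = table.get(get);
            // Read back the count written by WCReducer from cf:ct
            byte[] value = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("ct"));
            System.out.println("count = " + (value == null ? "not found" : Bytes.toString(value)));
            table.close();
        }
    }
    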
    package wc;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    
    // Driver: reads text files from HDFS and writes the word counts into the HBase table "wc".
    public class WCRunner {
    
        public static void main(String[] args) throws Exception {
    
            Configuration conf = new Configuration();
            // HDFS namenode and the ZooKeeper quorum used by the HBase client
            conf.set("fs.defaultFS", "hdfs://node1:8020");
            conf.set("hbase.zookeeper.quorum", "node1,node2,node3");
            Job job = Job.getInstance(conf);
            job.setJarByClass(WCRunner.class);
    
            // Set the mapper and its output key/value types
            job.setMapperClass(WCMapper.class);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(IntWritable.class);
            // initTableReducerJob sets the reducer and the TableOutputFormat for the target table "wc".
            // The last parameter, addDependencyJars, is set to false: the dependency jars are not
            // uploaded to the distributed cluster, i.e. the job is executed locally.
            TableMapReduceUtil.initTableReducerJob("wc", WCReducer.class, job, null, null, null, null, false);
            // Input: plain text files under this HDFS directory
            FileInputFormat.addInputPath(job, new Path("/user/hive/warehouse/wc/"));
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }
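    
    Before the job is submitted, the target table "wc" with column family "cf" must already exist in HBase (for example via create 'wc', 'cf' in the HBase shell). As an alternative, the following is a minimal sketch (not part of the original post) that creates the table from Java with the 0.98 admin API; the class name CreateWcTable is a placeholder.
    
    package wc;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    
    public class CreateWcTable {
    
        public static void main(String[] args) throws Exception {
            Configuration conf = HBaseConfiguration.create();
            conf.set("hbase.zookeeper.quorum", "node1,node2,node3");
            // HBaseAdmin is the admin client in the 0.98-era API
            HBaseAdmin admin = new HBaseAdmin(conf);
            if (!admin.tableExists("wc")) {
                HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("wc"));
                // column family written by WCReducer
                desc.addFamily(new HColumnDescriptor("cf"));
                admin.createTable(desc);
            }
            admin.close();
        }
    }
    
    After the job finishes, the result can be inspected with scan 'wc' in the HBase shell: each row key is a word and the value of cf:ct is its count.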
  • Original article: https://www.cnblogs.com/cac2020/p/10457927.html