• "main" java.io.IOException: Mkdirs failed to create /user/centos/hbase-staging (exists=false, cwd=file:/home/centos)



    Exception in thread "main" java.io.IOException: Mkdirs failed to create /user/centos/hbase-staging (exists=false, cwd=file:/home/centos)
    at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
    at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:426)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:906)
    at org.apache.hadoop.io.SequenceFile$Writer.<init>(SequenceFile.java:1071)
    at org.apache.hadoop.io.SequenceFile$RecordCompressWriter.<init>(SequenceFile.java:1371)
    at org.apache.hadoop.io.SequenceFile.createWriter(SequenceFile.java:272)
    at org.apache.hadoop.io.SequenceFile.createWriter(SequenceFile.java:294)
    at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.writePartitions(HFileOutputFormat2.java:335)
    at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.configurePartitioner(HFileOutputFormat2.java:596)
    at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.configureIncrementalLoad(HFileOutputFormat2.java:440)
    at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.configureIncrementalLoad(HFileOutputFormat2.java:405)
    at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.configureIncrementalLoad(HFileOutputFormat2.java:386)
    at bulkloadExample.BulkloadData.main(BulkloadData.java:88)
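
The trace shows the failure happening inside HFileOutputFormat2.configurePartitioner, which writes a partitions SequenceFile under the HBase staging directory (hbase.fs.tmp.dir, defaulting to /user/${user.name}/hbase-staging). Because the driver below pins fs.defaultFS to file:///, that path resolves on the local filesystem, where the centos user cannot create /user (cwd=file:/home/centos in the message). A minimal workaround sketch, assuming the local-filesystem run is intentional; the directory below is an assumption, any path the user can create works:

    // Assumption: running against the local filesystem on purpose.
    // Redirect the staging dir to a path the current user can create,
    // instead of the default /user/${user.name}/hbase-staging.
    conf.set("hbase.fs.tmp.dir", "/home/centos/hbase-staging");

Alternatively, leave fs.defaultFS at its HDFS default, so the staging directory is created in HDFS where /user/centos normally exists and is writable.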

    package bulkloadExample;
    
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
    import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
    import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
    import org.apache.hadoop.hbase.mapreduce.PutSortReducer;
    import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    
    import java.io.IOException;
    import java.net.URI;
    
    public class BulkloadData {
    
        static final String JOBNAME = "BulkLoad";
        static final String TABLENAME = "wqbin:duowan_user_bulkLoad";
        static final String PATH_IN = "/datain/duowan_user.txt";    // input path
        static final String PATH_OUT = "/dataout";    // output path for the generated HFiles
    
        static final String SEPARATOR = "\t";
    
        static final byte[] ColumnFamily =  "f".getBytes();  // column family
    //    static final byte[] row_id =  "id".getBytes();    // column qualifier
        static final byte[] name =  "name".getBytes();
        static final byte[] pass =  "pass".getBytes();
        static final byte[] mail =  "mail".getBytes();
        static final byte[] nickname =  "nickname".getBytes();
    
        public static class Map extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {
            protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
                String[] strArr = value.toString().split(SEPARATOR);
                String row = strArr[0];
                Put put = new Put(Bytes.toBytes(row));     // rowkey
                put.addColumn(ColumnFamily, name, Bytes.toBytes(strArr[1]));
                put.addColumn(ColumnFamily, pass, Bytes.toBytes(strArr[2]));
                put.addColumn(ColumnFamily, mail, Bytes.toBytes(strArr[3]));
                put.addColumn(ColumnFamily, nickname, Bytes.toBytes(strArr[4]));  // the original indexed strArr[3] here, duplicating mail

                // Key by the rowkey rather than the raw line: value.getBytes()
                // returns the backing array, which may be longer than the line.
                context.write(new ImmutableBytesWritable(Bytes.toBytes(row)), put);
            }
        }
    
        public static void main(String[] args) throws Exception {
    
            System.setProperty("HADOOP_USER_NAME", "centos");

            Configuration conf = HBaseConfiguration.create();
    //        conf.set("hbase.zookeeper.quorum", "xx,xx,xx");
            // Forcing file:/// makes hbase.fs.tmp.dir resolve on the local
            // filesystem, which is what triggers the Mkdirs failure above.
            conf.set("fs.defaultFS", "file:///");
            // Create the connection only after the configuration is final.
            Connection conn = ConnectionFactory.createConnection(conf);
    
            Job job =  Job.getInstance(conf, JOBNAME);
            job.setJarByClass(BulkloadData.class);
    
            job.setMapOutputKeyClass(ImmutableBytesWritable.class);
            job.setMapOutputValueClass(Put.class);
            TableMapReduceUtil.addDependencyJars(job);
            job.setMapperClass(Map.class);
            // The sort reducer (KeyValueSortReducer or PutSortReducer) does not
            // have to be set here: configureIncrementalLoad picks one itself
            // based on the map output value class.
            job.setReducerClass(PutSortReducer.class);
            job.setOutputFormatClass(HFileOutputFormat2.class);
    
            FileSystem fs = FileSystem.get(URI.create("/"),conf);
    
            Path outPath = new Path(PATH_OUT);
    //        if (fs.exists(outPath))fs.delete(outPath, true);
    
            FileOutputFormat.setOutputPath(job, outPath);
            FileInputFormat.setInputPaths(job, new Path(PATH_IN));
    
    //        HTable table = new HTable(conf, TABLENAME);
    
            // This is the call at BulkloadData.java:88 in the trace above: it
            // writes the partitions file into hbase.fs.tmp.dir, which is where
            // the Mkdirs failure occurs.
            HFileOutputFormat2.configureIncrementalLoad(job, conn.getTable(TableName.valueOf(TABLENAME)),
                    conn.getRegionLocator(TableName.valueOf(TABLENAME)));
            System.out.println("configureIncrementalLoad finished");
    
            if (job.waitForCompletion(true)) {
                LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
                // Left disabled as in the original; one plausible wiring
                // (signature from the HBase 1.x LoadIncrementalHFiles API):
    //            loader.doBulkLoad(new Path(PATH_OUT), conn.getAdmin(),
    //                    conn.getTable(TableName.valueOf(TABLENAME)),
    //                    conn.getRegionLocator(TableName.valueOf(TABLENAME)));
                System.exit(0);
            }

            System.exit(1);   // exit non-zero when the job fails
        }
    }
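
Because the doBulkLoad call stays commented out, the HFiles generated in /dataout are never actually loaded into the table. HBase also ships a command-line completer for HFileOutputFormat2 output; a sketch, assuming the output directory and table name from the code above:

    hbase org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles /dataout wqbin:duowan_user_bulkLoad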
• Original post: https://www.cnblogs.com/wqbin/p/10977703.html