• Reading data with Hadoop

  The two classes below make up the lab code: hadoopduqu is a singleton wrapper around FSDataInputStream that reads a single line from an HDFS input stream, and shuchu prints the contents of an HDFS file to standard output through a java.net.URL stream.

    // hadoopduqu.java
    package hadoopshiyan;
    
    
    import org.apache.hadoop.fs.*;
    
    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    
    // Singleton subclass of FSDataInputStream; readline() returns the first line of the wrapped stream
    public class hadoopduqu extends FSDataInputStream {
    
        private static hadoopduqu myFSDataInputStream;
        private static InputStream inputStream;
    
        private hadoopduqu(InputStream in) {
            super(in);
            inputStream = in;
        }
    
        public static hadoopduqu getInstance(InputStream inputStream){
            if (null == myFSDataInputStream){
                synchronized (hadoopduqu.class){
                    if (null == myFSDataInputStream){
                        myFSDataInputStream = new hadoopduqu(inputStream);
                    }
                }
            }
            return myFSDataInputStream;
        }
    
        // Read one line from the stream supplied to getInstance(); the FileSystem argument is not used here
        public static String readline(FileSystem fileStatus){
            try {
    //            FSDataInputStream inputStream = fileStatus.open(remotePath);
                BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
                String line = null;
                if ((line = bufferedReader.readLine()) != null){
                    bufferedReader.close();
                    inputStream.close();
                    return line;
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
            return null;
        }
    
    }
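
  A minimal, hypothetical driver is sketched below (it is not part of the original post) to show one way hadoopduqu might be exercised: open a file on HDFS with FileSystem, hand the stream to the singleton, and print the first line. The fs.defaultFS address and the file path /user/hadoop/test.txt are assumptions.

    package hadoopshiyan;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class duquDriver {

        public static void main(String[] args) throws Exception {
            // Assumed NameNode address; adjust to the local cluster configuration
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://localhost:8020");
            FileSystem fileSystem = FileSystem.get(conf);

            // Assumed file path, used only for illustration
            Path remotePath = new Path("/user/hadoop/test.txt");
            FSDataInputStream in = fileSystem.open(remotePath);

            // Wrap the open stream in the singleton and read the first line
            hadoopduqu.getInstance(in);
            System.out.println(hadoopduqu.readline(fileSystem));

            fileSystem.close();
        }
    }
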
    // shuchu.java
    package hadoopshiyan;
    
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
    import org.apache.hadoop.fs.Path;
    
    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.net.URL;
    
    // Prints the contents of an HDFS file to standard output by opening it through a java.net.URL stream
    public class shuchu {
    
        private Path remotePath;
        private FileSystem fileSystem;
    
        public shuchu(FileSystem fileSystem, Path remotePath){
            this.fileSystem = fileSystem;
            this.remotePath = remotePath;
        }
    
        public void show(){
            try {
                // Register the hdfs:// URL handler; this can only be done once per JVM
                URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
                // Open the file through an hdfs:// URL; the host and port are hard-coded to localhost:8020
                InputStream inputStream = new URL("hdfs","localhost",8020,remotePath.toString()).openStream();
                BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
                String line = null;
                while ((line = bufferedReader.readLine()) != null){
                    System.out.println(line);
                }
                bufferedReader.close(); // close the reader (and the underlying HDFS stream) when done
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    
    }
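
  The following driver is likewise a hypothetical sketch (not part of the original post) showing how shuchu might be invoked; the fs.defaultFS address and the file path are assumptions, and the host and port must match the values hard-coded in show().

    package hadoopshiyan;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class shuchuDriver {

        public static void main(String[] args) throws Exception {
            // Assumed NameNode address; show() builds its URL from localhost:8020
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://localhost:8020");
            FileSystem fileSystem = FileSystem.get(conf);

            // Assumed file path, used only for illustration
            Path remotePath = new Path("/user/hadoop/test.txt");

            // Print the whole file to standard output
            new shuchu(fileSystem, remotePath).show();

            fileSystem.close();
        }
    }
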
  • Original post: https://www.cnblogs.com/520520520zl/p/14199327.html