• Hadoop HDFS Java API operations, written as a JUnit 4 test class:


    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.*;
    import org.apache.hadoop.io.IOUtils;
    import org.apache.hadoop.util.Progressable;
    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    import java.io.BufferedInputStream;
    import java.io.BufferedReader;
    import java.io.BufferedWriter;
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.io.OutputStreamWriter;
    import java.net.URI;

    /**
     * Hadoop HDFS Java API operations.
     */
    public class HDFSApp {

        public static final String HDFS_PATH = "hdfs://192.168.223.132:9000";

        FileSystem fileSystem = null;
        Configuration configuration = null;


        /**
         * Create an HDFS directory.
         */
        @Test
        public void mkdir() throws Exception {
            fileSystem.mkdirs(new Path("/kr"));
        }

        /**
         * Create a file and write a line to it.
         */
        @Test
        public void create() throws Exception {
            FSDataOutputStream output = fileSystem.create(new Path("/kr/hdfstest1.txt"));
            output.write("1605-1 123456 HDFS".getBytes());
            output.flush();
            output.close();
        }

        /**
         * View the contents of an HDFS file (only the first line is read).
         */
        @Test
        public void cat() throws Exception {
            FSDataInputStream fin = fileSystem.open(new Path("/hdfsapi/test/hdfstest2.txt"));
            BufferedReader in = new BufferedReader(new InputStreamReader(fin, "UTF-8"));
            System.out.println(in.readLine());
            in.close();
        }
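
        /*
         * Sketch (not in the original post; the method name catFull is
         * illustrative): to dump an entire file instead of a single line,
         * Hadoop's IOUtils can stream the bytes straight to stdout.
         */
        @Test
        public void catFull() throws Exception {
            FSDataInputStream fin = fileSystem.open(new Path("/hdfsapi/test/hdfstest2.txt"));
            IOUtils.copyBytes(fin, System.out, 1024);
            fin.close();
        }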


        /**
         * Copy the first line of one HDFS file into another.
         */
        @Test
        public void copy() throws Exception {
            FSDataInputStream fin = fileSystem.open(new Path("/kr/hdfstest1.txt"));
            BufferedReader in = new BufferedReader(new InputStreamReader(fin, "UTF-8"));
            FSDataOutputStream fout = fileSystem.create(new Path("/hdfsapi/test/hdfstest2.txt"));
            BufferedWriter out = new BufferedWriter(new OutputStreamWriter(fout, "UTF-8"));
            out.write(in.readLine());
            out.flush();
            out.close();
            in.close(); // close the reader as well, not just the writer
        }

        /**
         * Rename a file.
         */
        @Test
        public void rename() throws Exception {
            Path oldPath = new Path("/hdfsapi/test/a.txt");
            Path newPath = new Path("/hdfsapi/test/b.txt");
            fileSystem.rename(oldPath, newPath);
        }

        /**
         * Upload a local file to HDFS.
         */
        @Test
        public void copyFromLocalFile() throws Exception {
            Path localPath = new Path("E:/data/input.txt");
            Path hdfsPath = new Path("/hdfsapi/test");
            fileSystem.copyFromLocalFile(localPath, hdfsPath);
        }

        /**
         * Upload a large local file to HDFS with a progress callback.
         */
        @Test
        public void copyFromLocalFileWithProgress() throws Exception {
            InputStream in = new BufferedInputStream(
                    new FileInputStream(
                            new File("/Users/rocky/source/spark-1.6.1/spark-1.6.1-bin-2.6.0-cdh5.5.0.tgz")));

            FSDataOutputStream output = fileSystem.create(new Path("/hdfsapi/test/spark-1.6.1.tgz"),
                    new Progressable() {
                        @Override
                        public void progress() {
                            System.out.print("."); // print a dot for each progress callback
                        }
                    });

            IOUtils.copyBytes(in, output, 4096, true); // true: close both streams when done
        }


        /**
         * Download an HDFS file to the local filesystem.
         */
        @Test
        public void copyToLocalFile() throws Exception {
            Path localPath = new Path("/Users/rocky/tmp/h.txt");
            Path hdfsPath = new Path("/hdfsapi/test/hello.txt");
            fileSystem.copyToLocalFile(hdfsPath, localPath);
        }

        /**
         * List all entries directly under a directory.
         */
        @Test
        public void listFiles() throws Exception {
            FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/"));

            for (FileStatus fileStatus : fileStatuses) {
                String isDir = fileStatus.isDirectory() ? "directory" : "file";
                short replication = fileStatus.getReplication();
                long len = fileStatus.getLen();
                String path = fileStatus.getPath().toString();

                System.out.println(isDir + " " + replication + " " + len + " " + path);
            }
        }
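
        /*
         * Sketch (not in the original post; the method name is illustrative):
         * listStatus only returns direct children. For a recursive walk,
         * FileSystem.listFiles(path, true) returns an iterator over all
         * files in the subtree (directories themselves are not reported).
         */
        @Test
        public void listFilesRecursive() throws Exception {
            RemoteIterator<LocatedFileStatus> it = fileSystem.listFiles(new Path("/"), true);
            while (it.hasNext()) {
                LocatedFileStatus status = it.next();
                System.out.println(status.getPath() + " " + status.getLen());
            }
        }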

        /**
         * Delete a path. The boolean argument enables recursive deletion;
         * note that "/" is the filesystem root, so this test wipes everything.
         */
        @Test
        public void delete() throws Exception {
            fileSystem.delete(new Path("/"), true);
        }
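
        /*
         * Sketch (not in the original post; the method name is illustrative):
         * a safer variant that targets a narrower subtree and checks that it
         * exists before deleting.
         */
        @Test
        public void deleteIfExists() throws Exception {
            Path target = new Path("/hdfsapi/test");
            if (fileSystem.exists(target)) {
                fileSystem.delete(target, true);
            }
        }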


        @Before
        public void setUp() throws Exception {
            configuration = new Configuration();
            fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "keke");
            System.out.println("HDFSApp.setUp");
        }

        @After
        public void tearDown() throws Exception {
            configuration = null;
            fileSystem = null;

            System.out.println("HDFSApp.tearDown");
        }

    }
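
To run the same operations outside JUnit, only the connection logic from setUp is needed. Below is a minimal standalone sketch that chains the mkdir, create, and cat steps above; the class name HDFSAppMain is illustrative, and the cluster address and user are the same assumptions as in the test class, so adjust both for your environment. The only external dependency is hadoop-client (plus JUnit 4 for the test class itself).

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.URI;

    public class HDFSAppMain {

        public static void main(String[] args) throws Exception {
            Configuration configuration = new Configuration();
            FileSystem fileSystem = FileSystem.get(
                    new URI("hdfs://192.168.223.132:9000"), configuration, "keke");

            // mkdir + create + cat, the same flow as the tests above
            fileSystem.mkdirs(new Path("/kr"));

            FSDataOutputStream output = fileSystem.create(new Path("/kr/hdfstest1.txt"));
            output.write("1605-1 123456 HDFS".getBytes());
            output.close();

            FSDataInputStream fin = fileSystem.open(new Path("/kr/hdfstest1.txt"));
            BufferedReader in = new BufferedReader(new InputStreamReader(fin, "UTF-8"));
            System.out.println(in.readLine());
            in.close();

            fileSystem.close();
        }
    }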

• Source: https://www.cnblogs.com/lijing925/p/9733039.html