• Implementing HDFS file operations in code


    In-class Test 1

    The program below exercises the basic HDFS Java API: it checks whether a path exists, creates a directory and empty files, appends a string to a file, reads a file back, and appends the contents of one file to another.


    package hadoop;
    
    import java.io.*;
     
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.*;
     
     
    public class hdfs {
        // Check whether a path exists on HDFS
        public static boolean test(Configuration conf, String path) throws IOException
        {
            FileSystem fs = FileSystem.get(conf);
            return fs.exists(new Path(path));
        }
        // Create a directory
        public static boolean mkdir(Configuration conf, String remoteDir) throws IOException
        {
            FileSystem fs = FileSystem.get(conf);
            Path dirPath = new Path(remoteDir);
            boolean result = fs.mkdirs(dirPath);
            fs.close();
            return result;
        }
        // Create an empty file
        public static void touchz(Configuration conf, String remoteFilePath) throws IOException
        {
            FileSystem fs = FileSystem.get(conf);
            Path remotePath = new Path(remoteFilePath);
            FSDataOutputStream outputStream = fs.create(remotePath);
            outputStream.close();
            fs.close();
        }
        // Delete a file (non-recursive)
        public static boolean rm(Configuration conf, String remoteFilePath) throws IOException
        {
            FileSystem fs = FileSystem.get(conf);
            Path remotePath = new Path(remoteFilePath);
            boolean result = fs.delete(remotePath, false);
            fs.close();
            return result;
        }
        // Print a file's contents to stdout
        public static void cat(Configuration conf, String FilePath) throws IOException
        {
            FileSystem fs = FileSystem.get(conf);
            Path file = new Path(FilePath);
            FSDataInputStream getIt = fs.open(file);
            BufferedReader d = new BufferedReader(new InputStreamReader(getIt));
            String content;
            while ((content = d.readLine()) != null)  // read every line, not just the first
            {
                System.out.println(content);
            }
            d.close();
            fs.close();
        }
        // Append a string to the end of an existing file
        public static void appendContentToFile(Configuration conf, String content, String remoteFilePath) throws IOException
        {
            FileSystem fs = FileSystem.get(conf);
            Path remotePath = new Path(remoteFilePath);
            FSDataOutputStream out = fs.append(remotePath);
            out.write(content.getBytes());
            out.close();
            fs.close();
        }
        // Append the contents of file 1 to the end of file 2
        public static void appendContentToFile2(Configuration conf, String remoteFilePath, String remoteFilePath2) throws IOException
        {
            FileSystem fs = FileSystem.get(conf);
            Path file = new Path(remoteFilePath);
            FSDataInputStream getIt = fs.open(file);
            BufferedReader d = new BufferedReader(new InputStreamReader(getIt));
            
            Path remotePath = new Path(remoteFilePath2);
            FSDataOutputStream out = fs.append(remotePath);
            String line;
            while ((line = d.readLine()) != null)  // copy every line, not just the first
            {
                out.write(line.getBytes());
            }
            d.close();
            out.close();
            fs.close();
        }
        
        public static void main(String[] args)
        {
            Configuration conf = new Configuration();
            // Allow append() to succeed on a single-node (pseudo-distributed) cluster,
            // where no replacement DataNode is available if the write pipeline fails
            conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
            conf.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");
            conf.set("fs.defaultFS", "hdfs://localhost:9000"); // fs.default.name is deprecated
            conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
            String remoteDir = "/user/hadoop/ZMH"; // HDFS directory
            String remoteFilePath = "/user/hadoop/ZMH/hdfstest1.txt";
            String remoteFilePath2 = "/user/hadoop/ZMH/hdfstest2.txt";
            String content = "Zhao Mohan in-class test";
            // Create the directory
            try {
                if (!hdfs.test(conf, remoteDir))
                {
                    hdfs.mkdir(conf, remoteDir);
                    System.out.println("Created directory " + remoteDir);
                }
                else
                {
                    System.out.println("Directory " + remoteDir + " already exists");
                }
                // Create file 1
                if (!hdfs.test(conf, remoteFilePath))
                {
                    hdfs.touchz(conf, remoteFilePath);
                    System.out.println("Created file " + remoteFilePath);
                }
                else
                {
                    System.out.println(remoteFilePath + " already exists");
                }
                // Append content to file 1, then print it
                hdfs.appendContentToFile(conf, content, remoteFilePath);
                hdfs.cat(conf, remoteFilePath);
                // Create file 2
                if (!hdfs.test(conf, remoteFilePath2))
                {
                    hdfs.touchz(conf, remoteFilePath2);
                    System.out.println("Created file " + remoteFilePath2);
                }
                else
                {
                    System.out.println(remoteFilePath2 + " already exists");
                }
                // Append file 1's contents to file 2, then print file 2
                hdfs.appendContentToFile2(conf, remoteFilePath, remoteFilePath2);
                hdfs.cat(conf, remoteFilePath2);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
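
    As a quick way to verify the result, a small listing utility can be compiled against the same Hadoop client libraries (for example, the jars reported by the hadoop classpath command). The sketch below is a minimal addition, not part of the original program: the class name HdfsListDemo is made up, and it assumes the same pseudo-distributed setup (NameNode at hdfs://localhost:9000) and the /user/hadoop/ZMH directory used above. It also shows an alternative to the open-and-close-per-call pattern: a single FileSystem instance managed with try-with-resources.

    package hadoop;

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class HdfsListDemo {
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://localhost:9000"); // assumed NameNode address, as above
            // FileSystem implements Closeable, so try-with-resources closes it automatically
            try (FileSystem fs = FileSystem.get(conf)) {
                // Print each entry under the test directory with its size in bytes
                for (FileStatus status : fs.listStatus(new Path("/user/hadoop/ZMH"))) {
                    System.out.println(status.getLen() + "\t" + status.getPath());
                }
            }
        }
    }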
• Source: https://www.cnblogs.com/zmh-980509/p/11553665.html