• Hadoop Learning Notes (2): HDFS API


    4. Delete a file on HDFS
    package proj;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    
    public class DeleteFile {
    
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();
            // Key point: without this setting the path resolves to the local filesystem instead of HDFS
            conf.set("fs.default.name","hdfs://localhost:9000");
            FileSystem hdfs = FileSystem.get(conf);
            Path delef = new Path("in/test3.txt");
            boolean isDeleted = hdfs.delete(delef, false);
            // Recursive delete:
            //boolean isDeleted = hdfs.delete(delef, true);
            System.out.println("delete? "+ isDeleted);
        }
    
    }
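    hdfs.delete(path, true) also removes directories together with their contents. Below is a minimal sketch of a recursive delete, assuming a directory in/tmp exists on HDFS and a Hadoop 2.x+ client (where fs.defaultFS is the current name of the deprecated fs.default.name property); the class name DeleteDir and the path are made up for this example.

    package proj;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    
    public class DeleteDir {
    
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();
            // fs.defaultFS replaces fs.default.name on Hadoop 2.x+
            conf.set("fs.defaultFS", "hdfs://localhost:9000");
            // try-with-resources closes the FileSystem handle when main() exits
            try (FileSystem hdfs = FileSystem.get(conf)) {
                Path dir = new Path("in/tmp");                  // hypothetical directory
                boolean isDeleted = hdfs.delete(dir, true);     // true = delete recursively
                System.out.println("delete? " + isDeleted);
            }
        }
    
    }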
    5. Check whether a file exists on HDFS
    package proj;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    
    public class CheckFile {
    
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();
            // Key point: without this setting the path resolves to the local filesystem instead of HDFS
            conf.set("fs.default.name","hdfs://localhost:9000");
            FileSystem hdfs = FileSystem.get(conf);
            Path findf = new Path("in/hello.c");
            boolean exist = hdfs.exists(findf);
            System.out.println("exist ? " + exist);
        }
    
    }
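    exists() only tells you that the path resolves; it does not say whether the path is a file or a directory. The sketch below adds that follow-up check with getFileStatus(). It assumes the same in/hello.c path and a Hadoop 2.x+ client (for FileStatus.isDirectory()); the class name CheckFileType is made up for this example.

    package proj;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    
    public class CheckFileType {
    
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();
            conf.set("fs.default.name","hdfs://localhost:9000");
            FileSystem hdfs = FileSystem.get(conf);
            Path findf = new Path("in/hello.c");
            if (hdfs.exists(findf)) {
                // getFileStatus() throws FileNotFoundException on a missing path,
                // so it is guarded by the exists() check above
                FileStatus status = hdfs.getFileStatus(findf);
                System.out.println(findf + (status.isDirectory() ? " is a directory" : " is a file"));
            } else {
                System.out.println(findf + " does not exist");
            }
        }
    
    }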
    6. Find the block locations of a file in the HDFS cluster
    package proj;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.BlockLocation;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    
    public class FileLocal {
    
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();
            // Key point: without this setting the path resolves to the local filesystem instead of HDFS
            conf.set("fs.default.name","hdfs://localhost:9000");
            FileSystem hdfs = FileSystem.get(conf);
            Path fpath = new Path("in/hello.c");
            FileStatus filestatus = hdfs.getFileStatus(fpath);
            BlockLocation[] blkLocations = hdfs.getFileBlockLocations(filestatus, 0, filestatus.getLen());
            int blockLen = blkLocations.length;
            for (int i = 0; i < blockLen; i++) {
                // a block may have several replicas; print every host that stores a copy of block i
                String[] hosts = blkLocations[i].getHosts();
                for (String host : hosts) {
                    System.out.println("block " + i + " location " + host);
                }
            }
        }
    
    }
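    Each BlockLocation also carries the byte range its block covers. The sketch below prints the offset and length of every block together with its replica hosts; it assumes the same in/hello.c file, and the class name FileBlocks is made up for this example.

    package proj;
    
    import java.io.IOException;
    import java.util.Arrays;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.BlockLocation;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    
    public class FileBlocks {
    
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();
            conf.set("fs.default.name","hdfs://localhost:9000");
            FileSystem hdfs = FileSystem.get(conf);
            Path fpath = new Path("in/hello.c");
            FileStatus filestatus = hdfs.getFileStatus(fpath);
            BlockLocation[] blkLocations = hdfs.getFileBlockLocations(filestatus, 0, filestatus.getLen());
            for (int i = 0; i < blkLocations.length; i++) {
                // offset/length describe the byte range of block i within the file
                System.out.println("block " + i
                        + " offset " + blkLocations[i].getOffset()
                        + " length " + blkLocations[i].getLength()
                        + " hosts " + Arrays.toString(blkLocations[i].getHosts()));
            }
        }
    
    }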
    7. Get the hostnames of all datanodes in the HDFS cluster
    package proj;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.DistributedFileSystem;
    import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
    
    public class GetList {
    
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();
            // Key point: without this setting the path resolves to the local filesystem instead of HDFS
            conf.set("fs.default.name","hdfs://localhost:9000");
            FileSystem fs = FileSystem.get(conf);
            // getDataNodeStats() is specific to DistributedFileSystem, hence the cast
            DistributedFileSystem hdfs = (DistributedFileSystem) fs;
            DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
            String[] names = new String[dataNodeStats.length];
            for (int i = 0; i < dataNodeStats.length; i++) {
                names[i] = dataNodeStats[i].getHostName();
                System.out.println("node " + i + " name " + names[i]);
            }
        }
    
    }
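    DatanodeInfo exposes more than the hostname. The sketch below also prints each datanode's capacity and remaining space via getCapacity() and getRemaining() (both in bytes); it assumes the same pseudo-distributed cluster at hdfs://localhost:9000, and the class name DataNodeReport is made up for this example.

    package proj;
    
    import java.io.IOException;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.DistributedFileSystem;
    import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
    
    public class DataNodeReport {
    
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();
            conf.set("fs.default.name","hdfs://localhost:9000");
            DistributedFileSystem hdfs = (DistributedFileSystem) FileSystem.get(conf);
            DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
            for (DatanodeInfo node : dataNodeStats) {
                // capacity and remaining space are reported in bytes
                System.out.println(node.getHostName()
                        + " capacity " + node.getCapacity()
                        + " remaining " + node.getRemaining());
            }
        }
    
    }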
  • Original post: https://www.cnblogs.com/i80386/p/3439132.html