HDFS常用API(1)
2021-06-11 03:07
标签: uri, iterator, 完成, system, ati, param, dfsr, ica, 配置信息。 一、HDFS集群API所需要jar包的maven配置信息; 二、从HDFS下载数据文件/上传文件到HDFS文件系统 —— 思路: 1. 获取配置信息; 2. 设置配置信息(块大小、副本数); 3. 构造客户端; 4. 下载数据文件/上传数据文件; 5. 关闭资源。(1) 下载文件; (2) 上传文件。 三、对HDFS系统进行操作的API; 四、(待续)。 原文地址: https://www.cnblogs.com/HelloBigTable/p/10581676.html
/**
 * Demo: downloads a file from an HDFS cluster to the local filesystem.
 *
 * @author: PrincessHug
 * @date: 2019/3/18, 16:10
 * @Blog: https://www.cnblogs.com/HelloBigTable/
 */
public class HdfsClientDemo02 {
    public static void main(String[] args) throws URISyntaxException, IOException, InterruptedException {
        // Client-side HDFS configuration object
        Configuration conf = new Configuration();
        // Request 2 replicas for files handled by this client
        conf.set("dfs.replication", "2");
        // try-with-resources guarantees the FileSystem is closed even if the
        // copy throws (the original leaked it on failure). Connects to the
        // NameNode at 192.168.126.128:9000 as user "root".
        try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.126.128:9000/"), conf, "root")) {
            // Download HDFS /words1.txt to the local path f://words1.txt
            fs.copyToLocalFile(new Path("/words1.txt"), new Path("f://words1.txt"));
        }
        System.out.println("下载完成");
    }
}
/**
 * Demo: uploads a local file to an HDFS cluster.
 *
 * @author: PrincessHug
 * @date: 2019/3/18, 11:53
 * @Blog: https://www.cnblogs.com/HelloBigTable/
 */
public class HdfsClientDemo01 {
    public static void main(String[] args) throws URISyntaxException, IOException, InterruptedException {
        // Client-side HDFS configuration object
        Configuration conf = new Configuration();
        // Set block size to 64 MB and replication factor to 2 for this client
        conf.set("dfs.blocksize", "64m");
        conf.set("dfs.replication", "2");
        // try-with-resources guarantees the FileSystem is closed even if the
        // copy throws (the original leaked it on failure). Connects to the
        // NameNode at 192.168.126.128:9000 as user "root".
        try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.126.128:9000/"), conf, "root")) {
            // Upload local /root/love.tsv to HDFS as /love1.tsv
            fs.copyFromLocalFile(new Path("/root/love.tsv"), new Path("/love1.tsv"));
        }
        System.out.println("上传成功!");
    }
}
/**
 * @author: PrincessHug
 * @date: 2019/3/18, 16:16
 * @Blog: https://www.cnblogs.com/HelloBigTable/
 */
public class HdfsClientDemo {
    // Shared HDFS client, initialized once for the whole class.
    // NOTE(review): remains null if initialization fails — every public method
    // below would then throw NPE; callers should ensure the cluster is reachable.
    private static FileSystem fs = null;
    static {
        Configuration conf = new Configuration();
        // 64 MB block size, replication factor 3 for files written by this client
        conf.set("dfs.blocksize", "64m");
        conf.set("dfs.replication", "3");
        try {
            // Connect to the NameNode at 192.168.126.128:9000 as user "root"
            fs = FileSystem.get(new URI("hdfs://192.168.126.128:9000/"), conf, "root");
        } catch (IOException | URISyntaxException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            // Restore the interrupt flag so later code can observe the interruption
            // (the original swallowed it).
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }
    /**
     * Creates a directory (including any missing parents) at the given HDFS path.
     * Bug fix: the original closed the shared static {@code fs} after the call,
     * which made every subsequent method on this class fail; the shared client
     * is now left open for reuse.
     * @param path HDFS directory path to create
     * @throws IOException if the filesystem operation fails
     */
    public void mkDir(String path) throws IOException {
        fs.mkdirs(new Path(path));
    }
    /**
     * Renames or moves a file/directory within HDFS.
     * Bug fix: the original closed the shared static {@code fs} after the call,
     * which made every subsequent method on this class fail; the shared client
     * is now left open for reuse.
     * @param path1 source HDFS path
     * @param path2 destination HDFS path
     * @throws IOException if the filesystem operation fails
     */
    public void hdfsRename(String path1, String path2) throws IOException {
        fs.rename(new Path(path1), new Path(path2));
    }
    /**
     * Recursively deletes a file or directory from HDFS.
     * Bug fix: the original closed the shared static {@code fs} after the call,
     * which made every subsequent method on this class fail; the shared client
     * is now left open for reuse.
     * @param path HDFS path to delete (directories are removed recursively —
     *             the {@code true} flag below)
     * @throws IOException if the filesystem operation fails
     */
    public void delete(String path) throws IOException {
        fs.delete(new Path(path), true);
    }
    /**
     * 列出hdfs指定的目录信息
     * @param path
     * @throws IOException
     */
    public void list(String path) throws IOException {
        RemoteIterator
上一篇:c# 遍历目录