Hadoop HDFS API: Creating a Directory

Prerequisite: the HDFS cluster is up and running.
Recommendation: install a big data plugin in IDEA (for example, Big Data Tools).

Create a new Maven project.

Then add the corresponding dependency coordinates, plus the logging dependencies, to pom.xml:

<dependencies> 
    <dependency> 
        <groupId>org.apache.hadoop</groupId> 
        <artifactId>hadoop-client</artifactId> 
        <version>3.1.3</version> 
    </dependency> 
    <dependency> 
        <groupId>junit</groupId> 
        <artifactId>junit</artifactId> 
        <version>4.12</version> 
    </dependency> 
    <dependency> 
        <groupId>org.slf4j</groupId> 
        <artifactId>slf4j-log4j12</artifactId> 
        <version>1.7.30</version> 
    </dependency> 
</dependencies>

In the project's src/main/resources directory, create a new file named "log4j.properties" and add the following:

log4j.rootLogger=INFO, stdout   
log4j.appender.stdout=org.apache.log4j.ConsoleAppender   
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout   
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n   
log4j.appender.logfile=org.apache.log4j.FileAppender   
log4j.appender.logfile.File=target/spring.log   
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout   
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n
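
Before moving on, you can quickly confirm the logging wiring works by writing one line through SLF4J; a minimal sketch (the class name LogCheck is just for illustration):

package org.cheetah.hdfs;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogCheck {
    private static final Logger LOG = LoggerFactory.getLogger(LogCheck.class);

    public static void main(String[] args) {
        // With the configuration above, this line should appear on the console via the stdout appender
        LOG.info("log4j.properties loaded; slf4j-log4j12 binding is active");
    }
}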

Create the package org.cheetah.hdfs.
Inside the package, create an HdfsClient class.

Write the following code:

package org.cheetah.hdfs;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.Configuration;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
// Note: beware of IDE auto-imports. java.nio.file.FileSystem, java.nio.file.Path,
// and javax.security.auth.login.Configuration will not work here; the HDFS client
// requires the org.apache.hadoop classes imported above.

public class HdfsClient {

    // FileSystem is an abstract class, so it cannot be instantiated with new;
    // obtain an instance via FileSystem.get instead
    private FileSystem fs;
    @Before
    public void init() throws URISyntaxException, IOException, InterruptedException {
        // 1. Get the file system
        Configuration configuration = new Configuration();

        // The two-argument overload connects as the current OS user:
        // FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:8020"), configuration);

        // The three-argument overload logs in to HDFS as the given user
        fs = FileSystem.get(new URI("hdfs://hadoop102:8020"), configuration, "cheetah");
    }
    @After
    public void close() throws IOException {
        // 3. Close the resource
        fs.close();
    }
    @Test
    public void testMkdirs() throws IOException {

        // 2. Create the directory
        fs.mkdirs(new Path("/xiyou/huaguoshan1/"));
    }
}
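
Note that FileSystem.get(uri, configuration, user) connects to the NameNode at hadoop102:8020 as the HDFS user "cheetah". If you need to override client-side settings, set them on the Configuration object before calling FileSystem.get; a minimal sketch (the replication value here is only an example):

Configuration configuration = new Configuration();
// Values set in code take priority over the cluster's default configuration files
configuration.set("dfs.replication", "2");
FileSystem fs = FileSystem.get(new URI("hdfs://hadoop102:8020"), configuration, "cheetah");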

Run the test via the run button next to @Test, and the directory is created on HDFS.
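
To verify from the client that the directory is really there, you can add a small check with fs.exists (a hypothetical extra test, not part of the original code):

    @Test
    public void testMkdirExists() throws IOException {
        // fs.exists returns true if the given path is present on HDFS
        System.out.println(fs.exists(new Path("/xiyou/huaguoshan1/"))); // expected: true
    }

You can also confirm the result in the NameNode web UI or with hdfs dfs -ls / on the cluster.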

To upload a local file to HDFS, add an upload test to the same HdfsClient class (the class skeleton, init, and close methods are unchanged from the listing above):
    // Upload operation
    @Test
    public void testPut() throws IOException {
        // Parameter 1: whether to delete the local source file
        // Parameter 2: whether to overwrite an existing destination file
        // Parameters 3 and 4: source path and destination path
        fs.copyFromLocalFile(false, true, new Path("X:\\sunwukong.txt"), new Path("/xiyouji/huaguoshan2/sunwukong.txt"));
    }
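
For the reverse direction, the FileSystem API offers copyToLocalFile; a sketch under the same setup (the local destination path is illustrative):

    // Download operation (companion to testPut above)
    @Test
    public void testGet() throws IOException {
        // Parameter 1: whether to delete the source on HDFS
        // Parameters 2 and 3: HDFS source path and local destination path
        // Parameter 4: use RawLocalFileSystem, which skips writing a local .crc checksum file
        fs.copyToLocalFile(false, new Path("/xiyouji/huaguoshan2/sunwukong.txt"), new Path("X:\\sunwukong2.txt"), true);
    }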
