HDFS HA连接和常用API操作

public class HDFSTest {
    FileSystem fileSystem;

    @Before
    public void setUp() throws IOException {
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", "hdfs://ns1");
        configuration.set("dfs.nameservices", "ns1");
        configuration.set("dfs.ha.namenodes.ns1", "namenode40,namenode413");
        configuration.set("dfs.namenode.rpc-address.ns1.namenode413", "bigdata-2.baofoo.cn:8020");
        configuration.set("dfs.namenode.rpc-address.ns1.namenode40", "bigdata-1.baofoo.cn:8020");
        configuration.set("dfs.client.failover.proxy.provider.ns1", "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser("hdfs"));
        fileSystem = FileSystem.newInstance(configuration);
    }

    @Test
    public void test() throws IOException {
        FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/user"));
        for (FileStatus fileStatus : fileStatuses) {
            Path path = fileStatus.getPath();
            System.out.println(path.getName());
        }
    }
}

复制文件/目录（FileUtil.copy：不删除源文件，允许覆盖目标）

// Copies /user/xiaoming to /user/hive/xiaoming within the same FileSystem.
// 5th arg (false) = do not delete the source; 6th arg (true) = overwrite the destination.
// NOTE(review): `configuration` here must be the Configuration built in setUp();
// as written, that variable is local to setUp() and not in scope — verify in the real code.
FileUtil.copy(fileSystem, new Path("/user/xiaoming"), fileSystem, 
new Path("/user/hive/xiaoming"), false, true, configuration);

设置所属人

// Sets the owner of /user/xiaoming/ to user "xiaoming" and group "group_1".
// NOTE(review): changing ownership typically requires HDFS superuser privileges — confirm.
fileSystem.setOwner(new Path("/user/xiaoming/"), "xiaoming", "group_1");

设置权限

// Grants rwx to owner, group, and others on /user/xiaoming/ (equivalent to chmod 777).
fileSystem.setPermission(new Path("/user/xiaoming/"), new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
posted @ 2020-06-29 19:07  ytsee  阅读(79)  评论(0)    收藏  举报