Creating an HDFS directory

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MakeDir {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/user/hadoop/data/20130709");
        // mkdirs() creates the directory (and any missing parents);
        // fs.create() would create an empty file instead.
        fs.mkdirs(path);
        fs.close();
    }
}
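
All of the examples on this page build an empty Configuration, so the NameNode address is picked up from core-site.xml / hdfs-site.xml on the classpath. If those files are not on the classpath, a minimal sketch like the following can point the client at the cluster explicitly; the hostname and port are placeholders, not real cluster values:

import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class ExplicitNameNode {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Placeholder address: replace with your cluster's fs.defaultFS value.
        FileSystem fs = FileSystem.get(URI.create("hdfs://namenode-host:9000"), conf);
        System.out.println("Connected to: " + fs.getUri());
        fs.close();
    }
}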

Deleting an HDFS directory

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteDir {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        Path path = new Path("/user/hadoop/data/20130710");
        // The second argument enables recursive deletion, so the call
        // also succeeds when the directory still contains files.
        fs.delete(path, true);
        fs.close();
    }
}
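
With the recursive flag set to false, delete() only succeeds on files and empty directories and fails on a non-empty one. A minimal sketch of the guarded, non-recursive pattern (the existence check is an assumption about how you may want to protect the call; the path is the same example path):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteEmptyDir {
    public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        Path dir = new Path("/user/hadoop/data/20130710");
        if (fs.exists(dir)) {
            // false = non-recursive: the call fails if the directory still has children.
            boolean removed = fs.delete(dir, false);
            System.out.println("Deleted: " + removed);
        }
        fs.close();
    }
}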

 

Writing a file to HDFS

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WriteFile {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/user/hadoop/data/write.txt");
        FSDataOutputStream out = fs.create(path);
        out.writeUTF("da jia hao,cai shi zhen de hao!");
        // Close the stream first so the data is flushed to HDFS,
        // then close the FileSystem handle.
        out.close();
        fs.close();
    }
}
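
Note that writeUTF() stores a 2-byte length prefix before the string (DataOutput framing), so the resulting file is not plain text. If the file should be readable with hadoop fs -cat or other tools, a minimal sketch that writes raw UTF-8 bytes instead (same example path and content as above):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WritePlainText {
    public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        Path path = new Path("/user/hadoop/data/write.txt");
        FSDataOutputStream out = fs.create(path, true); // true = overwrite if it exists
        // Plain UTF-8 bytes with no length prefix, so the file is ordinary text.
        out.write("da jia hao,cai shi zhen de hao!\n".getBytes(StandardCharsets.UTF_8));
        out.close();
        fs.close();
    }
}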

 

Reading a file from HDFS

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ReadFile {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/user/hadoop/data/write.txt");

        if (fs.exists(path)) {
            FSDataInputStream is = fs.open(path);
            FileStatus status = fs.getFileStatus(path);
            // Size the buffer from the file length reported by the NameNode.
            byte[] buffer = new byte[(int) status.getLen()];
            is.readFully(0, buffer);
            is.close();
            fs.close();
            // Decode the bytes; buffer.toString() would only print the
            // array's object reference, not the file contents.
            System.out.println(new String(buffer));
        }
    }
}
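
Reading the whole file into one byte array only works for files that fit in memory. For larger files it is common to stream the contents instead; a minimal sketch using Hadoop's IOUtils (same example file as above):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class CatFile {
    public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        FSDataInputStream in = fs.open(new Path("/user/hadoop/data/write.txt"));
        try {
            // Copy the stream to stdout in 4 KB chunks without buffering the whole file.
            IOUtils.copyBytes(in, System.out, 4096, false);
        } finally {
            IOUtils.closeStream(in);
            fs.close();
        }
    }
}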

Uploading a local file to HDFS

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyFromLocalFile {

    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path src = new Path("/home/hadoop/word.txt");
        Path dst = new Path("/user/hadoop/data/");
        // The destination is a directory, so the file keeps its local name
        // and ends up at /user/hadoop/data/word.txt.
        fs.copyFromLocalFile(src, dst);
        fs.close();
    }
}
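
The reverse direction works the same way; a minimal sketch that downloads the file back to the local filesystem (the local target directory is an assumption):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyToLocalFile {
    public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        Path src = new Path("/user/hadoop/data/word.txt");
        Path dst = new Path("/home/hadoop/download/");   // assumed local directory
        // Copies the HDFS file to the local filesystem; the HDFS copy is kept.
        fs.copyToLocalFile(src, dst);
        fs.close();
    }
}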

 

Deleting a file

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteFile {

    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        Path path = new Path("/user/hadoop/data/word.txt");
        // false = non-recursive, which is enough for a single file.
        fs.delete(path, false);
        fs.close();
    }
}
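
delete() removes the file immediately and permanently. If the cluster has the HDFS trash feature enabled (fs.trash.interval > 0), moving the file to trash is often preferable; a minimal sketch, assuming trash is configured on your cluster:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;

public class DeleteToTrash {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/user/hadoop/data/word.txt");
        // Moves the file into the user's .Trash directory instead of deleting it outright;
        // requires fs.trash.interval to be set on the cluster.
        boolean moved = Trash.moveToAppropriateTrash(fs, path, conf);
        System.out.println("Moved to trash: " + moved);
        fs.close();
    }
}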

 

Listing all subdirectories and files under a given directory

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class GetAllChildFile {
    static Configuration conf = new Configuration();

    public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/user/hadoop");
        getFile(path, fs);
        // fs.close();
    }

    // Recursively walks the tree: directories are descended into,
    // files are printed with their full path.
    public static void getFile(Path path, FileSystem fs) throws IOException {
        FileStatus[] fileStatus = fs.listStatus(path);
        for (int i = 0; i < fileStatus.length; i++) {
            if (fileStatus[i].isDirectory()) {
                getFile(fileStatus[i].getPath(), fs);
            } else {
                System.out.println(fileStatus[i].getPath().toString());
            }
        }
    }

}
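
Newer FileSystem versions (0.23 / 2.x onward) also provide a built-in recursive listing that avoids the manual recursion; a minimal sketch, assuming a Hadoop version that has listFiles():

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

public class ListFilesRecursive {
    public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        // true = recursive; the iterator returns files only, never directories.
        RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path("/user/hadoop"), true);
        while (it.hasNext()) {
            System.out.println(it.next().getPath());
        }
        fs.close();
    }
}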

Finding a file's block locations in HDFS

package com.hadoop.file;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FindFile {

    public static void main(String[] args) throws IOException {
        getFileLocation();
    }

    /**
     * Prints, for each block of the file, the DataNodes that hold a replica.
     */
    public static void getFileLocation() throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        Path path = new Path("/user/hadoop/data/write.txt");
        FileStatus status = fs.getFileStatus(path);
        BlockLocation[] locations = fs.getFileBlockLocations(status, 0, status.getLen());

        for (int i = 0; i < locations.length; i++) {
            String[] hosts = locations[i].getHosts();
            for (String host : hosts) {
                System.out.println("block_" + i + "_location:" + host);
            }
        }
        fs.close();
    }
}

Listing all DataNodes in the HDFS cluster

package com.hadoop.file;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class GetHDFSNodes {

    public static void main(String[] args) throws IOException {
        getHDFSNode();
    }

    /**
     * Prints the hostname of every DataNode in the cluster.
     */
    public static void getHDFSNode() throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        // getDataNodeStats() is only available on the HDFS implementation,
        // so the generic FileSystem handle is cast to DistributedFileSystem.
        DistributedFileSystem dfs = (DistributedFileSystem) fs;
        DatanodeInfo[] dataNodeStats = dfs.getDataNodeStats();

        for (int i = 0; i < dataNodeStats.length; i++) {
            System.out.println("DataNode_" + i + "_Node:" + dataNodeStats[i].getHostName());
        }
    }
}
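
DatanodeInfo carries more than the hostname; a minimal sketch that also reports per-node capacity and usage (the output formatting is illustrative):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class DataNodeReport {
    public static void main(String[] args) throws IOException {
        DistributedFileSystem dfs =
                (DistributedFileSystem) FileSystem.get(new Configuration());
        for (DatanodeInfo node : dfs.getDataNodeStats()) {
            // Capacity figures are reported in bytes.
            System.out.println(node.getHostName()
                    + "  capacity=" + node.getCapacity()
                    + "  used=" + node.getDfsUsed()
                    + "  remaining=" + node.getRemaining());
        }
        dfs.close();
    }
}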

 

 
