hdfs java操作

1.读出hadoop目录下的hello文件:

方案一(通过java.net.URL只能读取文件内容,不能写入):

import java.net.MalformedURLException;
import java.net.URL;
import java.io.*;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;

public class App1 {
    public static final String HDFS_PATH = "hdfs://mlj:9000/hello";

    /**
     * Streams the contents of the HDFS file at {@link #HDFS_PATH} to standard output.
     *
     * @param args unused
     * @throws Exception if the URL is malformed or the read fails
     */
    public static void main(String[] args) throws Exception {
        // Install the HDFS stream handler so java.net.URL can resolve the
        // "hdfs://" scheme (URL only understands http/ftp/... by default).
        // NOTE: setURLStreamHandlerFactory may be invoked at most once per JVM.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
        final URL source = new URL(HDFS_PATH);
        final InputStream stream = source.openStream();
        // args: input, output, buffer size in bytes, close both streams when done
        IOUtils.copyBytes(stream, System.out, 1024, true);
    }
}

----------------------------------

方案二(使用HDFS提供的FileSystem类可进行所有操作;Eclipse快捷键:alt+shift+m 抽取方法,alt+shift+l 抽取局部变量):

package hdfs;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class App2 {

	/** URI of the HDFS namenode (and demo file) to connect to. */
	public static final String HDFS_PATH = "hdfs://mlj:9000/hello";
	/** Directory created by the mkdir demo. */
	public static final String DIR_PATH = "/d";
	/** File used by the upload/download/delete demos. */
	public static final String FILE_PATH = "/d1";

	/**
	 * Demo driver: obtains a FileSystem client and runs one of the HDFS
	 * operations below (uncomment the one you want to try).
	 *
	 * @param args unused
	 * @throws Exception on connection or I/O failure
	 */
	public static void main(String[] args) throws Exception {
		final FileSystem fileSystem = FileSystem.get(new URI(HDFS_PATH), new Configuration());
		try {
			// Create a directory:
			//mkdir(fileSystem);
			// Upload a file (create the target directory first):
			//uploadFile(fileSystem);
			// Download a file (it must already exist):
			//downloadData(fileSystem);
			// Delete a file:
			//deleteFile(fileSystem);
			// List the root directory:
			list(fileSystem);
		} finally {
			fileSystem.close(); // release the client connection (was leaked before)
		}
	}

	/** Prints type, permission, replication, length and path for each entry under "/". */
	private static void list(FileSystem fileSystem) throws IOException {
		final FileStatus[] entries = fileSystem.listStatus(new Path("/"));
		for (FileStatus status : entries) {
			String kind = status.isDir() ? "文件夹" : "文件";
			final String permission = status.getPermission().toString();
			final short replication = status.getReplication();
			final long length = status.getLen();
			final String path = status.getPath().toString();
			System.out.println(kind + "\t" + permission + "\t" + replication + "\t" + length + "\t" + path);
		}
	}

	/** Recursively deletes {@link #FILE_PATH}. */
	private static void deleteFile(final FileSystem fileSystem)
			throws IOException {
		fileSystem.delete(new Path(FILE_PATH), true);
	}

	/** Copies {@link #FILE_PATH} from HDFS to standard output. */
	private static void downloadData(final FileSystem fileSystem)
			throws IOException {
		final FSDataInputStream in = fileSystem.open(new Path(FILE_PATH));
		IOUtils.copyBytes(in, System.out, 1024, true); // auto-closes both streams
	}

	/** Creates the directory {@link #DIR_PATH} (including missing parents). */
	private static void mkdir(final FileSystem fileSystem) throws IOException {
		fileSystem.mkdirs(new Path(DIR_PATH));
	}

	/**
	 * Uploads the local file D:/b.txt to {@link #FILE_PATH} on HDFS.
	 *
	 * @throws FileNotFoundException if the local source file is missing
	 */
	private static void uploadFile(final FileSystem fileSystem)
			throws IOException, FileNotFoundException {
		final FSDataOutputStream out = fileSystem.create(new Path(FILE_PATH)); // output stream to the HDFS target
		final FileInputStream in = new FileInputStream("D:/b.txt"); // local source
		IOUtils.copyBytes(in, out, 1024, true); // auto-closes both streams
	}
}

----------------------

package hdfs;

import java.net.MalformedURLException;
import java.net.URL;
import java.io.*;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;

public class App1 {
	public static final String HDFS_PATH = "hdfs://mlj:9000/hello";

	/**
	 * Entry point: copies the HDFS file named by HDFS_PATH to System.out.
	 *
	 * @param args unused
	 * @throws Exception if the URL cannot be parsed or the stream fails
	 */
	public static void main(String[] args) throws Exception {
		// Teach java.net.URL the "hdfs://" scheme; this factory can be
		// registered at most once per JVM.
		URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
		final InputStream in = new URL(HDFS_PATH).openStream();
		IOUtils.copyBytes(in, System.out, 1024, true); // 1024-byte buffer, auto-close
	}
}

  

package hdfs;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class App2 {

	/** URI of the HDFS namenode (and demo file) to connect to. */
	public static final String HDFS_PATH = "hdfs://mlj:9000/hello";
	/** Directory created by the mkdir demo. */
	public static final String DIR_PATH = "/d";
	/** File used by the upload/download/delete demos. */
	public static final String FILE_PATH = "/d1";

	/**
	 * Demo driver: obtains a FileSystem client and runs one of the HDFS
	 * operations below (uncomment the one you want to try).
	 *
	 * @param args unused
	 * @throws Exception on connection or I/O failure
	 */
	public static void main(String[] args) throws Exception {
		final FileSystem fileSystem = FileSystem.get(new URI(HDFS_PATH), new Configuration());
		try {
			// Create a directory:
			//mkdir(fileSystem);
			// Upload a file (create the target directory first):
			//uploadFile(fileSystem);
			// Download a file (it must already exist):
			//downloadData(fileSystem);
			// Delete a file:
			//deleteFile(fileSystem);
			// List the root directory:
			list(fileSystem);
		} finally {
			fileSystem.close(); // release the client connection (was leaked before)
		}
	}

	/** Prints type, permission, replication, length and path for each entry under "/". */
	private static void list(FileSystem fileSystem) throws IOException {
		final FileStatus[] entries = fileSystem.listStatus(new Path("/"));
		for (FileStatus status : entries) {
			String kind = status.isDir() ? "文件夹" : "文件";
			final String permission = status.getPermission().toString();
			final short replication = status.getReplication();
			final long length = status.getLen();
			final String path = status.getPath().toString();
			System.out.println(kind + "\t" + permission + "\t" + replication + "\t" + length + "\t" + path);
		}
	}

	/** Recursively deletes {@link #FILE_PATH}. */
	private static void deleteFile(final FileSystem fileSystem)
			throws IOException {
		fileSystem.delete(new Path(FILE_PATH), true);
	}

	/** Copies {@link #FILE_PATH} from HDFS to standard output. */
	private static void downloadData(final FileSystem fileSystem)
			throws IOException {
		final FSDataInputStream in = fileSystem.open(new Path(FILE_PATH));
		IOUtils.copyBytes(in, System.out, 1024, true); // auto-closes both streams
	}

	/** Creates the directory {@link #DIR_PATH} (including missing parents). */
	private static void mkdir(final FileSystem fileSystem) throws IOException {
		fileSystem.mkdirs(new Path(DIR_PATH));
	}

	/**
	 * Uploads the local file D:/b.txt to {@link #FILE_PATH} on HDFS.
	 *
	 * @throws FileNotFoundException if the local source file is missing
	 */
	private static void uploadFile(final FileSystem fileSystem)
			throws IOException, FileNotFoundException {
		final FSDataOutputStream out = fileSystem.create(new Path(FILE_PATH)); // output stream to the HDFS target
		final FileInputStream in = new FileInputStream("D:/b.txt"); // local source
		IOUtils.copyBytes(in, out, 1024, true); // auto-closes both streams
	}
}

  

posted @ 2015-05-05 11:09  孟想阳光  阅读(256)  评论(0)    收藏  举报