// 文件系统: FSUtil
// FSUtil — 文件系统工具类 (file-system utility class)
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import java.io.*;
/**
* 文件系统工具类
*/
public class FSUtil {
//获取FileSystem对象
public static FileSystem getFS() {
FileSystem fs = null;
try {
Configuration conf = new Configuration();
conf.setInt("io.file.buffer.size", 8192);
fs = FileSystem.newInstance(conf);
} catch (Exception e) {
//do noting
}
return fs;
}
//获取LocalFileSystem对象
public static LocalFileSystem getLFS() {
LocalFileSystem lfs = null;
try {
Configuration conf = new Configuration();
lfs = FileSystem.newInstanceLocal(conf);
} catch (Exception e) {
//do noting
}
return lfs;
}
//关闭文件对象
public static void closeFS(FileSystem fs) {
if (fs != null) {
try {
fs.close();
} catch (Exception e) {
// do nothing
}
}
}
/**
* 读取hdfs的文件写到console
*
* @throws IOException
*/
public static void downToConsole(FileSystem fs, String toFilePath) throws IOException {
FSDataInputStream fis = fs.open(new Path(toFilePath));
IOUtils.copyBytes(fis, System.out, 4096, true);
}
/**
* 读取hdfs的文件写到local
*/
public static void downToLocal(FileSystem fs, String fromHdfsPath, String toLocalPath) throws IOException {
FSDataInputStream fis = fs.open(new Path(fromHdfsPath));
OutputStream os = new FileOutputStream(new File(toLocalPath));
IOUtils.copyBytes(fis, os, 4096, true);
}
