package hdfsTest.answer.hdfs;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
//import java.net.URLDecoder;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Small HDFS demo: writes a sample UTF-8 string to a file on HDFS,
 * then reads it back and streams the raw bytes to stdout.
 */
public class HDFSTest {

    /**
     * Entry point: writes the sample text to the hard-coded HDFS path,
     * then reads it back and prints it.
     *
     * @throws IOException if any HDFS operation fails
     */
    public static void main(String[] args) throws IOException {
        System.out.println("execute start!!!");
        String file = "hdfs://hadoop1:9000/linjm/hdfsTest.txt";
        String str = "hello world 你好 HA Welcome!!!\n Java.\n";
        HDFSTest t = new HDFSTest();
        t.writeFile(file, str);
        t.readFile(file);
        System.out.println("execute end!!!");
    }

    /**
     * Reads the given HDFS file and writes its raw bytes to {@code System.out}.
     *
     * @param file full HDFS URI of the file to read
     * @throws IOException if the filesystem or the file cannot be opened or read
     */
    public void readFile(String file) throws IOException {
        Configuration conf = new Configuration();
        // try-with-resources closes both the stream and the FileSystem even if
        // a read fails; the original leaked both on any exception.
        try (FileSystem fs = FileSystem.get(URI.create(file), conf);
             FSDataInputStream hdfsIS = fs.open(new Path(file))) {
            byte[] ioBuffer = new byte[1024];
            int readLen;
            while ((readLen = hdfsIS.read(ioBuffer)) != -1) {
                System.out.write(ioBuffer, 0, readLen);
            }
        }
    }

    /**
     * Creates (or overwrites) the given HDFS file and writes {@code str}
     * to it encoded as UTF-8.
     *
     * @param file full HDFS URI of the file to write
     * @param str  content to write
     * @throws IOException if the filesystem or the file cannot be created or written
     */
    public void writeFile(String file, String str) throws IOException {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(URI.create(file), conf)) {
            // Logs whether the target path already exists (message is Chinese:
            // "does the path exist:").
            System.out.println("路径是否存在:" + fs.exists(new Path(file)));
            try (FSDataOutputStream hdfsOS = fs.create(new Path(file))) {
                // Explicit charset: the no-arg getBytes() uses the platform
                // default and can corrupt the non-ASCII sample text.
                byte[] bytes = str.getBytes(StandardCharsets.UTF_8);
                hdfsOS.write(bytes, 0, bytes.length);
            }
        }
    }
}