package hadoopshiyan;

import org.apache.hadoop.fs.FSDataInputStream;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

public class hadoopduqu extends FSDataInputStream {

    // volatile so the double-checked locking in getInstance() is safe.
    private static volatile hadoopduqu myFSDataInputStream;
    private static BufferedReader bufferedReader;

    private hadoopduqu(InputStream in) {
        super(in);
        // Wrap the stream in a single shared reader; building a new
        // BufferedReader on every read would buffer ahead and drop lines.
        bufferedReader = new BufferedReader(new InputStreamReader(in));
    }

    // Double-checked locking singleton: only the first caller's stream is
    // kept; streams passed on later calls are ignored.
    public static hadoopduqu getInstance(InputStream inputStream) {
        if (null == myFSDataInputStream) {
            synchronized (hadoopduqu.class) {
                if (null == myFSDataInputStream) {
                    myFSDataInputStream = new hadoopduqu(inputStream);
                }
            }
        }
        return myFSDataInputStream;
    }

    // Reads one line per call and returns null at end of file. The shared
    // stream stays open between calls; the caller closes it when finished.
    public static String readline() {
        try {
            return bufferedReader.readLine();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }
}
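
// Usage sketch (an assumption, not one of the original lab files): a minimal
// driver showing how hadoopduqu is meant to be consumed. The NameNode address
// hdfs://localhost:8020 mirrors the port used in shuchu below; the file path
// /user/hadoop/test.txt and the class name hadoopduquDemo are illustrative.
package hadoopshiyan;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class hadoopduquDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:8020");
        FileSystem fileSystem = FileSystem.get(conf);
        // Hand the raw HDFS stream to the singleton, then drain it line by line.
        hadoopduqu in = hadoopduqu.getInstance(fileSystem.open(new Path("/user/hadoop/test.txt")));
        String line;
        while ((line = hadoopduqu.readline()) != null) {
            System.out.println(line);
        }
        in.close();
        fileSystem.close();
    }
}
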
package hadoopshiyan;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.fs.Path;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;

public class shuchu {

    private final Path remotePath;
    private final FileSystem fileSystem;

    static {
        // setURLStreamHandlerFactory may be called at most once per JVM, so
        // the hdfs:// URL handler is registered in a static initializer rather
        // than inside show(), where a second call would throw an Error.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    public shuchu(FileSystem fileSystem, Path remotePath) {
        this.fileSystem = fileSystem;
        this.remotePath = remotePath;
    }

    // Opens the remote file through an hdfs:// URL and prints it line by line;
    // try-with-resources closes the reader and underlying stream.
    public void show() {
        try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(
                new URL("hdfs", "localhost", 8020, remotePath.toString()).openStream()))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                System.out.println(line);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
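
// Usage sketch (an assumption, not part of the original source): drives shuchu
// against the same assumed NameNode. Constructing the object loads the class
// and so registers the hdfs:// handler via the static initializer before
// show() opens the URL. Host, port, and file path are illustrative.
package hadoopshiyan;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class shuchuDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:8020");
        FileSystem fileSystem = FileSystem.get(conf);
        new shuchu(fileSystem, new Path("/user/hadoop/test.txt")).show();
        fileSystem.close();
    }
}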