1.编程实现一个类“MyFSDataInputStream”,该类继承“org.apache.hadoop.fs.FSDataInputStream”,要求如下:实现按行读取HDFS中指定文件的方法“readLine()”,如果读到文件末尾,则返回空,否则返回文件一行的文本。
import java.io.*;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * FSDataInputStream subclass for exercise 1: provides a line-oriented
 * {@code readline} helper and a {@code cat} utility that prints an HDFS
 * file to standard output line by line.
 */
public class MyFSDataInputStream extends FSDataInputStream {

    public MyFSDataInputStream(InputStream in) {
        super(in);
    }

    /**
     * Reads a single line of text from the given reader.
     *
     * @param br reader positioned at the start of the next line
     * @return the line without its trailing {@code '\n'} — an empty string
     *         for a blank line — or {@code null} once the stream is at
     *         end-of-file
     * @throws IOException if the underlying stream fails
     */
    public static String readline(BufferedReader br) throws IOException {
        // Accumulate characters in a StringBuilder: the original fixed
        // 1024-char array overflowed on long lines, checked the wrong index
        // (data[off+1], one past the char just read) for the newline, and
        // returned the whole NUL-padded buffer via String.valueOf(data).
        StringBuilder line = new StringBuilder();
        int ch;
        while ((ch = br.read()) != -1) {
            if (ch == '\n') {
                // End of line: a blank line yields "" rather than null so
                // callers do not mistake it for end-of-file.
                return line.toString();
            }
            line.append((char) ch);
        }
        // EOF: return the final unterminated line if any, otherwise null.
        return line.length() > 0 ? line.toString() : null;
    }

    /**
     * Prints the contents of the HDFS file at {@code remoteFilePath} to
     * standard output, one line at a time via {@link #readline(BufferedReader)}.
     *
     * @param conf           Hadoop configuration (must name the HDFS endpoint)
     * @param remoteFilePath absolute HDFS path of the file to print
     * @throws IOException if the file cannot be opened or read
     */
    public static void cat(Configuration conf, String remoteFilePath) throws IOException {
        // try-with-resources closes fs/in/br even when reading fails
        // part-way through (the original leaked them on exception).
        try (FileSystem fs = FileSystem.get(conf);
             FSDataInputStream in = fs.open(new Path(remoteFilePath));
             BufferedReader br =
                 new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
            String line;
            while ((line = readline(br)) != null) {
                System.out.println(line);
            }
        }
    }

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // "fs.defaultFS" is the current property name; the old
        // "fs.default.name" key is deprecated.
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        String remoteFilePath = "/user/hadoop/merge.txt";
        try {
            cat(conf, remoteFilePath);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
2.查看Java帮助手册或其它资料,用“java.net.URL”和“org.apache.hadoop.fs.FsURLStreamHandlerFactory”编程完成输出HDFS中指定文件的文本到终端中。
import java.io.*;
import java.net.URL;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;
/**
 * Exercise 2: prints the text of an HDFS file to standard output using
 * {@code java.net.URL} with Hadoop's {@code FsUrlStreamHandlerFactory}.
 */
public class ShowTheContent {

    // URL.setURLStreamHandlerFactory may be invoked at most once per JVM;
    // installing the factory in a static initializer (instead of inside
    // show()) keeps a second call to show() from throwing an Error.
    static {
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    /**
     * Copies the contents of the HDFS file at {@code remotePath} to
     * standard output.
     *
     * @param remotePath absolute HDFS path of the file to display
     */
    public void show(String remotePath) {
        InputStream in = null;
        try {
            in = new URL("hdfs", "localhost", 9000, remotePath).openStream();
            // false: keep System.out open after the copy.
            IOUtils.copyBytes(in, System.out, 4096, false);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Close the input stream even when copyBytes fails
            // (the original leaked it on the exception path).
            IOUtils.closeStream(in);
        }
    }

    public static void main(String[] args) {
        new ShowTheContent().show("/user/hadoop/merge.txt");
    }
}