昨天的试验中,完成了Hadoop的部署及简单实验,今天通过在Eclipse中编写java下载HDFS中的文件,遇到了类似如下的问题:
java.lang.IllegalArgumentException: Wrong FS: hdfs://192.168.21.133:9000/user/centos/tmp/test.txt, expected: file:///
出现该异常的根本原因是:默认的 Configuration 把默认文件系统当作 file:///,而传入的却是 hdfs:// 路径。解决方法是把目标路径的 URI 显式传给 FileSystem.get(URI.create(dst), conf),示例代码如下:
/**
 * Downloads an HDFS file to the local disk by streaming it through
 * IOUtils.copyBytes.
 *
 * Passing the target URI to FileSystem.get(URI, Configuration) binds the
 * FileSystem to the hdfs:// scheme, which avoids the
 * "Wrong FS ... expected: file:///" error raised when the default (local)
 * configuration is used with an hdfs:// path.
 *
 * @throws IOException if the HDFS read or the local write fails
 */
public static void getFileFromHDFS3() throws FileNotFoundException, IOException {
    String dst = "hdfs://192.168.21.133:9000/user/centos/tmp/test.txt";
    Configuration conf = new Configuration();
    // try-with-resources closes the FileSystem, the HDFS input stream and
    // the local output stream even if copyBytes throws. The original code
    // never closed the FileOutputStream at all (resource leak).
    try (FileSystem hdfs = FileSystem.get(URI.create(dst), conf);
         FSDataInputStream dis = hdfs.open(new Path(dst));
         OutputStream out = new FileOutputStream("e:/test-get3.txt")) {
        // close=false: the streams are closed by try-with-resources above.
        IOUtils.copyBytes(dis, out, 4096, false);
    }
}
/**
 * Downloads an HDFS file using FileSystem.copyToLocalFile.
 *
 * NOTE: the FileSystem is obtained from the default Configuration, so this
 * variant only works when core-site.xml and hdfs-site.xml are available on
 * the local classpath; otherwise the default file:/// scheme does not match
 * the hdfs:// source path and the "Wrong FS" error is thrown.
 *
 * @throws IOException if the copy fails
 */
public static void getFileFromHDFS2() throws FileNotFoundException, IOException {
    Configuration configuration = new Configuration();
    FileSystem fileSystem = FileSystem.get(configuration);
    Path remotePath = new Path("hdfs://192.168.21.133:9000/user/centos/tmp/test.txt");
    Path localPath = new Path("e:/test-get2.txt");
    fileSystem.copyToLocalFile(remotePath, localPath);
}
/**
 * Downloads an HDFS file by manually copying bytes through a buffer.
 *
 * Like getFileFromHDFS3, it passes the target URI to FileSystem.get so the
 * FileSystem is bound to hdfs:// rather than the default file:/// scheme.
 *
 * @throws IOException if the HDFS read or the local write fails
 */
private static void getFileFromHDFS1() throws FileNotFoundException, IOException {
    String dst = "hdfs://192.168.21.133:9000/user/centos/tmp/test.txt";
    Configuration conf = new Configuration();
    // try-with-resources guarantees all three resources are closed even on
    // exception; the original only closed them on the success path, leaking
    // the streams and the FileSystem if read() or write() threw.
    try (FileSystem hdfs = FileSystem.get(URI.create(dst), conf);
         FSDataInputStream hdfsInStream = hdfs.open(new Path(dst));
         OutputStream out = new FileOutputStream("e:/test-get1.txt")) {
        byte[] ioBuffer = new byte[1024];
        int readLen;
        // Standard read-until-EOF loop: read() returns -1 at end of stream.
        while ((readLen = hdfsInStream.read(ioBuffer)) != -1) {
            out.write(ioBuffer, 0, readLen);
        }
    }
}
建议使用方法1或3