文章目录
Read from HDFS
@description: 从hdfs的文件读取数据到本地d盘的文本文件中,使用IOUtils.copyBytes(),这个方法比较通用:
/**
 * Reads a file from HDFS and copies its contents to a local text file on the
 * D: drive using {@code IOUtils.copyBytes()}.
 */
public class ReadTest {
    /** Base URI of the HDFS NameNode. */
    private static final String HDFS_SERVER = "hdfs://192.168.17.151:8020";

    public static void main(String[] args) {
        System.setProperty("hadoop.home.dir", "D://hadoop-2.7.7");
        System.setProperty("HADOOP_USER_NAME", "root");
        ReadTest readTest = new ReadTest();
        readTest.readFromHdfs();
    }

    /**
     * Opens {@code /wordcount_input} on HDFS and streams it to
     * {@code d://upFiles/wordcount-input.txt}. Errors are reported to stderr.
     */
    public void readFromHdfs() {
        Configuration conf = new Configuration();
        // Reuse the constant — the original repeated the URL literal here.
        conf.set("fs.defaultFS", HDFS_SERVER);
        File file = new File("d://upFiles/wordcount-input.txt");
        // Create the target directory up front so FileOutputStream cannot fail
        // on a missing parent (the original assumed d://upFiles already existed).
        File parent = file.getParentFile();
        if (parent != null && !parent.exists() && !parent.mkdirs()) {
            System.err.println("Could not create directory: " + parent);
            return;
        }
        // try-with-resources closes the FileSystem and both streams on every path.
        // The original leaked the FileSystem always, and leaked the input stream
        // whenever the FileOutputStream constructor threw before copyBytes ran.
        try (FileSystem fs = FileSystem.get(conf);
             FSDataInputStream in = fs.open(new Path(HDFS_SERVER + "/wordcount_input"));
             FileOutputStream out = new FileOutputStream(file)) {
            // close=false: the try-with-resources block now owns stream lifetimes.
            IOUtils.copyBytes(in, out, 4096, false);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
Write to HDFS
@description: 写数据到HDFS文件,直接使用FSDataOutputStream.write()方法写:
public class WriteTest {
private static final String HDFS_SERVER = "hdfs://192.168.17.151:8020";
public static void main(String[] args) throws IOException {
System.setProperty("HADOOP_USER_NAME","root");
System.setProperty("hadoop.home.dir", "D://hadoop-2.7.7");
Configuration conf = new Configuration();
conf.set("fs.defaultFS", HDFS_SERVER);
FileSystem fileSystem = FileSystem