package com.xjj.test;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/*
 * Demo program that reads a text file from HDFS and prints its lines.
 */
public class TongWordsTest {

    /**
     * Reads {@code /user/input/text1.txt} from the HDFS cluster at
     * 192.168.126.133:8020 and prints each line to stdout, or prints a
     * "not found" message when the file does not exist.
     *
     * @param args unused
     * @throws IOException if the NameNode cannot be reached or the read fails
     */
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Point the client at the NameNode explicitly (stands in for the
        // fs.defaultFS entry of core-site.xml) — without this the local FS is used.
        conf.set("fs.defaultFS", "hdfs://192.168.126.133:8020");

        Path path = new Path("hdfs://192.168.126.133:8020/user/input/text1.txt");

        // try-with-resources closes the FileSystem handle even on failure;
        // the original leaked it along with all three stream/reader objects.
        try (FileSystem fs = FileSystem.get(conf)) {
            if (!fs.exists(path)) {
                System.out.println("不存在");
                return;
            }
            System.out.println("Exists!");
            // FSDataInputStream is Hadoop's input stream type; wrap it for
            // buffered, UTF-8 line-oriented reading. Resources are closed in
            // reverse declaration order when the block exits. I/O errors
            // propagate via the declared IOException instead of being
            // swallowed by a broad catch.
            try (FSDataInputStream is = fs.open(path);
                 InputStreamReader inputStreamReader =
                         new InputStreamReader(is, StandardCharsets.UTF_8);
                 BufferedReader reader = new BufferedReader(inputStreamReader)) {
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println("line=" + line);
                }
            }
        }
    }
}