// 文件写入 — Example 1: write a string to a file on HDFS
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class HdfsWriteFile{
public static void main(String[] args){
Configuration conf = new Configurtion();
Path dfs = new Path("hdfs://localhost:9000");
FileSystem fs = dfs.getFileSystem(conf);//实例化fs对象
//定义一个写入的内容
byte[] buff = "nice day".getBytes();
//定义写入的路径
string filename = "hdfs://localhost:8000/usr/hadoop/test.txt";
//实例化一个输出流
FSDataOutputStream os = ps.create(new Path(filename));
BufferedReader d = new BudfferedReader(new OutputStreamReader(os));
os.write(buff,2,buff.length-2)//写入buffer
d.close();//关闭文件系统
fs.close();//关闭hdfs
}catch(Exception e){
e.printStackTrace(e)
}
}
// 文件读入 — Example 2: read a line from a file on HDFS
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class HDFSReadFile{
public static void main(String[] args){
Configuration conf = new Configurtion();
Path dfs = new Path("hdfs://localhost:9000");
FileSystem fs = dfs.getFileSystem(conf);//实例化fs对象
//构建读取路径
Path fileName = new Path("hdfs://localhost:8000/usr/hadoop/test.txt");
//实例化一个输入流
FSDataInputStream is = ps.open(new Path(filename));
BufferedReader d = new BudfferedReader(new InputStreamReader(is));
String content = d.readLine();
System.out.print(content);
d.close();//关闭文件系统
fs.close();//关闭hdfs
}catch(Exception e){
e.printStackTrace(e)
}
}