package zq;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Writes a short UTF-encoded string to a file on HDFS.
 *
 * <p>Connects to the NameNode at {@code hdfs://localhost:9000}, creates (or
 * overwrites) {@code /user/hadoop/hadoopfile/t1}, and writes one string in
 * {@link java.io.DataOutput#writeUTF} format (2-byte length prefix + modified UTF-8).
 */
public class Write {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // "fs.defaultFS" supersedes the deprecated "fs.default.name" key.
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        Path inFile = new Path("/user/hadoop/hadoopfile/t1");
        FileSystem hdfs = FileSystem.get(conf);
        // try-with-resources guarantees the stream is closed (and data flushed
        // to the NameNode) even if writeUTF throws; the original leaked the
        // stream on any failure between create() and close().
        try (FSDataOutputStream outputStream = hdfs.create(inFile)) {
            outputStream.writeUTF("china cstor cstor china");
            outputStream.flush();
        }
    }
}
Printing the contents of the file just written to HDFS:
package output;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Reads back and prints the UTF-encoded string previously written to HDFS.
 *
 * <p>Connects to the NameNode at {@code hdfs://localhost:9000}, opens
 * {@code /user/hadoop/hadoopfile/t1}, and prints the first
 * {@link java.io.DataInput#readUTF}-encoded string it contains.
 */
public class Read {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // "fs.defaultFS" supersedes the deprecated "fs.default.name" key.
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        Path inFile = new Path("/user/hadoop/hadoopfile/t1");
        FileSystem hdfs = FileSystem.get(conf);
        // try-with-resources closes the stream even if readUTF throws;
        // the original leaked the stream on any read failure.
        try (FSDataInputStream inputStream = hdfs.open(inFile)) {
            System.out.println("myfile:" + inputStream.readUTF());
        }
    }
}