在新建类对话框中将Name填写为IsFile,创建名为IsFile的Java类。该文件的目的是判断HDFS文件系统中是否存在某个文件或目录。
package com.huawei.hdfs; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import java.io.IOException; public class IsFile { public static void main(String[] args) throws IOException { Configuration conf = new Configuration(); //conf.set("fs.defaultFS", "hdfs:// master:9000"); FileSystem fs = FileSystem.get(conf); Path path1 = new Path("/user/test/hdfs"); Path path2 = new Path("/user/test/hdfs/file10.txt"); //判断目录是否存在 if (fs.exists(path1)) { System.out.println(path1+"目录已经存在"); }else{ System.out.println(path1+"目录不存在"); } //判断文件是否存在 if (fs.exists(path2)) { System.out.println(path2+"文件已经存在"); }else{ System.out.println(path2+"文件不存在"); } fs.close(); } }
用同样的方法,在com.huawei.hdfs包下面创建名为CreateFile1的Java Class文件,用于在HDFS上创建一个空白文件。
package com.huawei.hdfs; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import java.io.IOException; /** * 创建空白文件 */ public class CreateFile1 { public static void main(String[] args) throws IOException { Configuration conf = new Configuration(); //conf.set("fs.defaultFS", "hdfs:// master:9000"); FileSystem fs = FileSystem.get(conf); boolean create = fs.createNewFile(new Path("/user/test/hdfs/file10.txt")); System.out.println(create ? "创建成功":"创建失败,文件已经存在"); fs.close(); } }
在com.huawei.hdfs包下面创建名为CreateFile2的Java Class,用于在HDFS上创建文件的同时向该文件写入数据。
package com.huawei.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
/**
 * Creates a file on HDFS and writes a few lines of text into it.
 */
public class CreateFile2 {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Uncomment to target a specific NameNode instead of the default from core-site.xml.
        //conf.set("fs.defaultFS", "hdfs://master:9000");
        Path path = new Path("/user/test/hdfs/file11.txt");
        // try-with-resources closes every handle (in reverse order) even if a write throws;
        // the original leaked all three handles in that case. Closing the BufferedWriter
        // also flushes and closes the underlying stream, so the original's separate
        // dos.close() after bw.close() was redundant.
        try (FileSystem fs = FileSystem.get(conf);
             FSDataOutputStream dos = fs.create(path);
             // Fix: write UTF-8 explicitly instead of the platform default charset,
             // so the file content does not depend on the client machine's locale.
             BufferedWriter bw =
                     new BufferedWriter(new OutputStreamWriter(dos, StandardCharsets.UTF_8))) {
            System.out.println(path + "文件创建成功");
            bw.write("huawei");
            bw.newLine();
            bw.write("bigdata");
            bw.newLine();
            bw.write("java");
            bw.newLine();
        }
    }
}
在com.huawei.hdfs包下面创建名为ScanFile的Java Class来实现查看HDFS文件系统中某个文件的内容。
package com.huawei.hdfs; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; public class ScanFile { public static void main(String[] args) throws IOException { Configuration conf = new Configuration(); //conf.set("fs.defaultFS", "hdfs://master:9000"); FileSystem fs = FileSystem.get(conf); Path path = new Path(args[0]); FSDataInputStream red = fs.open(path); BufferedReader bf = new BufferedReader(new InputStreamReader(red)); String line = null; while ((line = bf.readLine()) != null){ System.out.println(line); } bf.close(); red.close(); fs.close(); } }
在com.huawei.hdfs包下面创建名为D