importjava.io.File;importjava.io.IOException;importjava.net.URI;importjava.text.ParseException;importjava.text.SimpleDateFormat;importjava.util.Date;importorg.apache.commons.logging.Log;importorg.apache.hadoop.conf.Configuration;importorg.apache.hadoop.fs.FSDataInputStream;importorg.apache.hadoop.fs.FSDataOutputStream;importorg.apache.hadoop.fs.FileStatus;importorg.apache.hadoop.fs.FileSystem;importorg.apache.hadoop.fs.LocatedFileStatus;importorg.apache.hadoop.fs.Path;importorg.apache.hadoop.fs.RemoteIterator;importorg.apache.hadoop.hdfs.DistributedFileSystem;importorg.apache.hadoop.hdfs.protocol.DatanodeInfo;importorg.apache.hadoop.hdfs.server.datanode.DataNode;importorg.codehaus.jettison.json.JSONException;importorg.codehaus.jettison.json.JSONObject;importorg.json.JSONArray;import org.slf4j.*;importcom.shidai.hadoop.utils.Constant;importcom.shidai.hadoop.utils.DateUtil;public classHDFSTest {private static String url =Constant.url;private static Configuration conf = newConfiguration();public static voidgetAllDataNode() {try{
FileSystem fs=FileSystem.get(conf);
DistributedFileSystem distributedfs=(DistributedFileSystem) fs;
DatanodeInfo[] datanodeInfos=distributedfs.getDataNodeStats();for (int i = 0; i < datanodeInfos.length; i++) {
System.out.println("第" + i + "个datanode:" +datanodeInfos[i].getHostName());
}
}catch(IOException e) {
e.printStackTrace();
}
}

/**
 * Creates a file on HDFS and writes the given bytes into it.
 * An existing file at the same path is overwritten (FileSystem.create default).
 *
 * @param dst      HDFS path of the file to create
 * @param contents raw bytes to write
 * @throws IOException if the filesystem cannot be reached or the write fails
 */
public static void createFile(String dst, byte[] contents) throws IOException {
    // try-with-resources guarantees the output stream and the FileSystem are
    // closed even when write() throws; the original leaked both on failure.
    try (FileSystem fs = FileSystem.get(URI.create(url), conf);
            FSDataOutputStream out = fs.create(new Path(dst))) {
        out.write(contents);
    }
    System.out.println("创建文件成功");
}

/**
 * Reads an HDFS file expected to contain a JSON object, prints its text,
 * then prints the upload time derived from the JSON "last_time" field
 * (interpreted as seconds since the epoch).
 *
 * @param dst HDFS path of the file to read
 * @throws JSONException  if the file content is not a valid JSON object
 * @throws ParseException if DateUtil cannot format the timestamp
 */
public static void readFile(String dst) throws JSONException, ParseException {
    // try-with-resources closes the stream and filesystem even on error;
    // the original only closed them on the success path.
    try (FileSystem fs = FileSystem.get(URI.create(url), conf);
            FSDataInputStream in = fs.open(new Path(dst))) {
        // FIX: the original called in.read() a second time inside the loop
        // body, silently discarding every other chunk of the file. Read once
        // per iteration and buffer the raw bytes.
        java.io.ByteArrayOutputStream buf = new java.io.ByteArrayOutputStream();
        byte[] ioBuffer = new byte[1024];
        int len;
        while ((len = in.read(ioBuffer)) != -1) {
            buf.write(ioBuffer, 0, len);
        }
        // Decode the whole payload once as UTF-8: avoids the platform-default
        // charset and avoids splitting multibyte characters across chunks.
        String content = new String(buf.toByteArray(), java.nio.charset.StandardCharsets.UTF_8);
        System.out.println(content);
        JSONObject json = new JSONObject(content);
        long time = json.getLong("last_time");
        String sd = DateUtil.getDate(time * 1000); // seconds -> milliseconds
        System.out.println("上传时间:" + sd);
    } catch (IOException e) {
        e.printStackTrace();
    }
}/*** 遍历文件
*
*@paramdst*/
public static voidlistFiles(String dst) {
FileSystem fs= null;try{
fs=FileSystem.get(URI.create(url), conf);
RemoteIterator iterator = fs.listFiles(new Path(dst), false);while(iterator.hasNext()) {
LocatedFileStatus locatedFileStatus=iterator.next();if(locatedFileStatus.isFile()) {
String path=locatedFileStatus.getPath().toString();
System.out.println(path);if (!path.endsWith("tmp")) {
readFile(path);
}
}
}
}catch(Exception e) {
e.printStackTrace();
}
}/*** 上传文件
*
*@paramsrc
*@paramdst*/
public static voidupload(String src, String dst) {
FileSystem fs= null;try{
fs=FileSystem.get(URI.create(url), conf);
Path srcPath= newPath(src);
Path dstPath= newPath(dst);
fs.copyFromLocalFile(false, srcPath, dstPath);//打印文件路径
System.out.println("list files");
FileStatus[] fileStatus=fs.listStatus(dstPath);for(FileStatus fstatus : fileStatus) {
System.out.println(fstatus.getPath());
}
}catch(IOException e) {
e.printStackTrace();
}finally{if (fs != null) {try{
fs.close();
}catch(IOException e) {
e.printStackTrace();
}
}
}
}/*** 删除文件
*
*@paramargs
*@throwsJSONException
*@throwsParseException*/
public static voiddelete(String dst) {
FileSystem fs= null;try{
fs=FileSystem.get(URI.create(url), conf);
Boolean flag= fs.delete(new Path(dst), false);if(flag) {
System.out.println("删除成功");
}else{
System.out.println("删除失败");
}
}catch(IOException e) {
e.printStackTrace();
}
}

/**
 * Demo entry point. Uncomment the call you want to exercise; by default it
 * deletes /user/root/test from the cluster.
 */
public static void main(String[] args) throws JSONException, ParseException {
    // Needed on Windows so Hadoop can locate its native helpers.
    System.setProperty("hadoop.home.dir",
            "C:/Users/root/.m2/repository/org/apache/hadoop/hadoop-common/2.5.2");

    // Sample payload for the createFile example below.
    byte[] contents = "明月几时有...\n".getBytes();

    /*
     * try { createFile("/user/hadoop/test/hdfs01.txt", contents); }
     * catch (IOException e) { e.printStackTrace(); }
     */
    // getAllDataNode();
    // upload("F:/yun/svn/1.txt", "/user/root/");
    // readFile("/flume/data/FlumeData.1469543705102");  // read one file
    // listFiles("/flume/");                             // walk a directory
    delete("/user/root/test");
}
}