上传文件到HDFS:(对HDFS的操作,注意config的配置)
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class trytry {
    /**
     * Uploads a local file to HDFS under a time-stamped destination path,
     * deleting the local source on success.
     */
    public static void main(String[] args) {
        FileSystem hdfs = null;
        try {
            // Destination: a millisecond-timestamped file under /hadoop/tmp on HDFS.
            String destfile = "/hadoop/tmp/" + System.currentTimeMillis();
            // Local source file to upload.
            String fsrc = "/usr/hahahah.txt";
            Configuration config = new Configuration();
            // "fs.defaultFS" is the current key; "fs.default.name" is deprecated.
            config.set("fs.defaultFS", "hdfs://192.168.146.130:9000");
            // Connect to the NameNode as user "hadoop".
            hdfs = FileSystem
                    .get(new URI("hdfs://192.168.146.130:9000"), config, "hadoop");
            Path srcPath = new Path(fsrc);
            Path destPath = new Path(destfile);
            // delSrc = true: remove the local source file after a successful copy.
            boolean delSrc = true;
            hdfs.copyFromLocalFile(delSrc, srcPath, destPath);
            System.out.println("It is over : " + destfile);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (hdfs != null) {
                try {
                    // close() releases only this instance; the original called the
                    // static closeAll(), which tears down every cached FileSystem.
                    hdfs.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
在HDFS之中创建文件
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class CreatFile {
    /**
     * Creates file /lilan1 on HDFS and writes a short byte payload into it.
     * The output stream and the FileSystem are closed so the data is actually
     * flushed to the cluster (the original leaked both).
     */
    public static void main(String args[]) throws Exception {
        Configuration cof = new Configuration();
        byte[] buff = "hello word!".getBytes();
        // "fs.defaultFS" is the current key; "fs.default.name" is deprecated.
        cof.set("fs.defaultFS", "hdfs://192.168.146.130:9000");
        // Connect to the NameNode as user "hadoop".
        FileSystem hdfs = FileSystem
                .get(new URI("hdfs://192.168.146.130:9000"), cof, "hadoop");
        try {
            Path dfs = new Path("/lilan1");
            // try-with-resources guarantees the stream is flushed and closed
            // even if write() throws.
            try (FSDataOutputStream outputStream = hdfs.create(dfs)) {
                outputStream.write(buff, 0, buff.length);
            }
            System.out.println("cdhngegb");
        } finally {
            hdfs.close();
        }
    }
}
重命名HDFS文件的文件名:
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class rename {
    /**
     * Renames HDFS path /lilan1 to /lilan2 and reports whether it worked.
     * FileSystem.rename() signals failure through its boolean return value
     * (not an exception), which the original silently discarded.
     */
    public static void main(String args[]) throws Exception {
        Configuration cof = new Configuration();
        // "fs.defaultFS" is the current key; "fs.default.name" is deprecated.
        cof.set("fs.defaultFS", "hdfs://192.168.146.130:9000");
        // NOTE(review): user "hadoop1" differs from the "hadoop" user that
        // created /lilan1 in the snippet above — confirm permissions allow this.
        FileSystem hdfs = FileSystem
                .get(new URI("hdfs://192.168.146.130:9000"), cof, "hadoop1");
        try {
            Path sname = new Path("/lilan1");
            Path dname = new Path("/lilan2");
            boolean renamed = hdfs.rename(sname, dname);
            System.out.println(renamed
                    ? "Renamed /lilan1 to /lilan2"
                    : "Rename failed: /lilan1 -> /lilan2");
        } finally {
            hdfs.close();
        }
    }
}
查看HDFS中的文件的最后修改时间:
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class finalEditTime {
    /**
     * Prints the last-modification time of HDFS path /lilan2, both as raw
     * epoch milliseconds and as a human-readable UTC instant. The FileSystem
     * handle is closed when done (the original leaked it).
     */
    public static void main(String args[]) throws Exception {
        Configuration cof = new Configuration();
        // "fs.defaultFS" is the current key; "fs.default.name" is deprecated.
        cof.set("fs.defaultFS", "hdfs://192.168.146.130:9000");
        // NOTE(review): user "hadoop1" differs from the "hadoop" user used
        // elsewhere in this file — confirm it can stat /lilan2.
        FileSystem hdfs = FileSystem
                .get(new URI("hdfs://192.168.146.130:9000"), cof, "hadoop1");
        try {
            Path pa = new Path("/lilan2");
            FileStatus filestatus = hdfs.getFileStatus(pa);
            // getModificationTime() returns epoch milliseconds.
            long time = filestatus.getModificationTime();
            // Fully qualified java.time.Instant so no new import is needed.
            System.out.println("The last Edit time is:" + time
                    + " (" + java.time.Instant.ofEpochMilli(time) + " UTC)");
        } finally {
            hdfs.close();
        }
    }
}