使用 Java API 操作 HDFS 的示例:
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
 * Examples of driving HDFS through the Java FileSystem API: download,
 * directory create/delete, file listing, block locations and replication.
 *
 * NOTE(review): host, paths and the "hadoop" user are hard-coded for a
 * local single-node cluster at hdfs://127.0.0.1:9000 — adjust before use.
 */
public class Hdfsutil {

    /** HDFS endpoint used by every test and by main(). */
    private static final String HDFS_URI = "hdfs://127.0.0.1:9000";
    /** User to act as; avoids permission errors without -DHADOOP_USER_NAME. */
    private static final String HDFS_USER = "hadoop";

    FileSystem fs = null;

    /** Opens a FileSystem handle before each test, acting as HDFS_USER. */
    @Before
    public void init() throws IOException, InterruptedException, URISyntaxException {
        fs = FileSystem.get(new URI(HDFS_URI), new Configuration(), HDFS_USER);
    }

    /**
     * Closes the handle after each test. Previously only some tests called
     * fs.close(), leaking the connection in testFileStatus/testOthers.
     */
    @After
    public void cleanup() throws IOException {
        if (fs != null) {
            fs.close();
        }
    }

    /**
     * Downloads a file from HDFS to the local filesystem.
     * delSrc=false keeps the HDFS copy; useRawLocalFileSystem=true skips
     * writing a local .crc checksum file.
     *
     * @throws IOException if the transfer fails
     */
    @Test
    public void testDownload() throws IllegalArgumentException, IOException {
        fs.copyToLocalFile(false, new Path("/user/hadoop/in/test2.log"),
                new Path("/home/hadoop/chinesecalendar2.log"), true);
    }

    /**
     * Creates a directory, uploads a file into it, then deletes it
     * recursively and verifies it is gone.
     *
     * @throws IOException if any HDFS operation fails
     */
    @Test
    public void testDir() throws IllegalArgumentException, IOException {
        fs.mkdirs(new Path("/user/hadoop/mkdir/"));
        System.out.println("make a dir");
        if (fs.exists(new Path("/user/hadoop/mkdir/"))) {
            System.out.println("fs exist");
            fs.copyFromLocalFile(new Path("/home/hadoop/chinesecalendar.log"),
                    new Path("/user/hadoop/mkdir/test.log"));
            System.out.println("upload success");
            // true = recursive: delete the directory together with its contents
            fs.delete(new Path("/user/hadoop/mkdir"), true);
            System.out.println("delete success");
            if (!fs.exists(new Path("/user/hadoop/mkdir"))) {
                System.out.println("had delete and not exist");
            }
        } else {
            System.out.println("not exist");
        }
    }

    /**
     * Lists file information two ways: listFiles (recursive, files only)
     * and listStatus (one level, files and directories).
     *
     * @throws IOException if listing fails
     */
    @Test
    public void testFileStatus() throws FileNotFoundException, IllegalArgumentException, IOException {
        // listFiles only returns files (recursively), never directories.
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
        while (listFiles.hasNext()) {
            LocatedFileStatus fileStatus = listFiles.next();
            System.out.println(fileStatus.getPath().getName());
        }
        System.out.println("------------------");
        // listStatus returns both files and directories, one level deep.
        FileStatus[] listStatus = fs.listStatus(new Path("/"));
        for (FileStatus f : listStatus) {
            // ls-style type marker: "d" for directory, "-" for regular file
            String type = f.isDirectory() ? "d" : "-";
            System.out.println(type + "\t" + f.getPath().getName());
        }
    }

    /**
     * Shows block locations of the first 100 bytes of a file and changes
     * its replication factor. (Rename example left commented out.)
     *
     * @throws IOException if any HDFS operation fails
     */
    @Test
    public void testOthers() throws IllegalArgumentException, IOException {
        BlockLocation[] fileBlockLocation =
                fs.getFileBlockLocations(new Path("/user/hadoop/in/test3.log"), 0, 100);
        for (BlockLocation location : fileBlockLocation) {
            // getNames() may be empty for a block with no reachable replica;
            // the original indexed [0] unconditionally and could throw.
            String[] names = location.getNames();
            System.out.println(location.getOffset()
                    + "\n" + location.getLength()
                    + "\n" + (names.length > 0 ? names[0] : "(no datanode)"));
        }
        // fs.rename(new Path("/user/hadoop/in/test2.log"), new Path("/user/hadoop/in/test3.log"));
        fs.setReplication(new Path("/user/hadoop/in/test3.log"), (short) 2);
    }

    /**
     * Standalone upload example. Passing the user to FileSystem.get avoids
     * permission errors; the alternative is setting fs.defaultFS on the
     * Configuration plus the JVM argument -DHADOOP_USER_NAME=hadoop.
     */
    public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
        Configuration conf = new Configuration();
        // Alternative: conf.set("fs.defaultFS", HDFS_URI); fs = FileSystem.get(conf);
        FileSystem fs = FileSystem.get(new URI(HDFS_URI), conf, HDFS_USER);
        try {
            fs.copyFromLocalFile(new Path("/home/hadoop/chinesecalendar.log"),
                    new Path("/user/hadoop/in/test2.log"));
        } finally {
            // Always release the connection, even if the upload fails.
            fs.close();
        }
    }
}
当程序因 HDFS 权限问题报错、需要指定访问所用的用户时,有两种方式:
1. 修改 JVM 参数:
Run Configurations -> VM arguments -> "-DHADOOP_USER_NAME=hadoop"(用户名区分大小写,须与 HDFS 上实际拥有权限的用户一致)
2. 在代码中直接指定用户:FileSystem fs = FileSystem.get(new URI("hdfs://127.0.0.1:9000"), conf, "hadoop");