1.导入jar包:
包括:hdfs下的核心包和它依赖的lib包,以及commons的核心包和lib包
2.将core-site.xml配置文件放在src目录下。
3.代码如下:
package com.zhichao.wan.hdfs;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Before;
import org.junit.Test;
public class Hdfs2 {

    // Shared FileSystem handle; re-initialized before every test from the
    // core-site.xml found on the classpath (see setup step 2).
    FileSystem fs = null;

    /**
     * Builds the FileSystem client before each test. The default Configuration
     * picks up core-site.xml from the classpath, so no explicit URI is needed.
     *
     * @throws Exception if the FileSystem cannot be created
     */
    @Before
    public void init() throws Exception {
        Configuration conf = new Configuration();
        fs = FileSystem.get(conf);
    }

    /**
     * Uploads a local file to HDFS by copying streams manually.
     *
     * @throws Exception if reading the local file or writing to HDFS fails
     * @throws IllegalArgumentException if a path is invalid
     */
    @Test
    public void upload() throws IllegalArgumentException, Exception {
        // try-with-resources guarantees both streams are flushed and closed even
        // on failure; leaving an FSDataOutputStream unclosed can truncate the
        // file in HDFS because buffered data is never flushed.
        try (FileInputStream in = new FileInputStream(new File("/home/hadoop/download/a.txt"));
             FSDataOutputStream out = fs.create(new Path("hdfs://Centos01:9000/a1.txt"))) {
            IOUtils.copy(in, out);
        }
    }

    /**
     * Uploads a local file to HDFS using the built-in convenience API.
     *
     * @throws IOException if the copy fails
     * @throws IllegalArgumentException if a path is invalid
     */
    @Test
    public void upload2() throws IllegalArgumentException, IOException {
        fs.copyFromLocalFile(new Path("/home/hadoop/download/a.txt"),
                new Path("hdfs://Centos01:9000/a3.txt"));
    }

    /**
     * Downloads an HDFS file to the local filesystem by copying streams manually.
     *
     * @throws Exception if reading from HDFS or writing the local file fails
     * @throws IllegalArgumentException if a path is invalid
     */
    @Test
    public void download() throws IllegalArgumentException, Exception {
        // Same resource-safety fix as upload(): both streams are closed
        // deterministically instead of being leaked.
        try (FSDataInputStream in = fs.open(new Path("hdfs://Centos01:9000/a.txt"));
             FileOutputStream out = new FileOutputStream("/home/hadoop/download/a2.txt")) {
            IOUtils.copy(in, out);
        }
    }

    /**
     * Downloads an HDFS file using the built-in convenience API.
     *
     * @throws IOException if the copy fails
     * @throws IllegalArgumentException if a path is invalid
     */
    @Test
    public void download2() throws IllegalArgumentException, IOException {
        fs.copyToLocalFile(new Path("hdfs://Centos01:9000/a.txt"),
                new Path("/home/hadoop/download/a3.txt"));
    }

    /**
     * Creates a directory in HDFS (parents are created as needed).
     *
     * @throws Exception if the directory cannot be created
     * @throws IllegalArgumentException if the path is invalid
     */
    @Test
    public void mkdir() throws IllegalArgumentException, Exception {
        fs.mkdirs(new Path("/aaa"));
    }

    /**
     * Deletes a path from HDFS; the {@code true} flag deletes recursively.
     *
     * @throws Exception if the delete fails
     * @throws IllegalArgumentException if the path is invalid
     */
    @Test
    public void rm() throws IllegalArgumentException, Exception {
        fs.delete(new Path("/aaa"), true);
    }

    /**
     * Lists HDFS contents two ways: listFiles (recursive, files only) and
     * listStatus (one level, files and directories).
     *
     * @throws FileNotFoundException if the root path does not exist
     * @throws Exception if the listing fails
     * @throws IllegalArgumentException if the path is invalid
     */
    @Test
    public void listfiles() throws FileNotFoundException, IllegalArgumentException, Exception {
        // Recursive listing: yields files only, never directories.
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
        while (listFiles.hasNext()) {
            LocatedFileStatus file = listFiles.next();
            System.out.println(file.getPath().getName()
                    + (file.isDirectory() ? " is a directory" : " is not a directory"));
        }
        System.out.println("#####################################");
        // Non-recursive listing of the root: includes directories.
        FileStatus[] listStatus = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : listStatus) {
            System.out.println(fileStatus.getPath().getName()
                    + (fileStatus.isDirectory() ? " is a directory" : " is not a directory"));
        }
    }
}
4.如果在Windows中访问HDFS文件遇到权限问题时,需要设置Run Configuration运行参数:-DHADOOP_USER_NAME=hadoop 是JVM系统属性,应添加在VM arguments(虚拟机参数)中,而不是Program arguments中。