package cn.test;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/* Quick reference for common FileSystem operations:
fs.copyFromLocalFile(src, dst); // upload a local file to HDFS
fs.copyToLocalFile(src, dst);   // download from HDFS to local
fs.create(f)                    // write file contents through an output stream
fs.deleteOnExit(f)              // temp file: removed when the program exits
fs.getFileBlockLocations(file, start, len) // block location info for a file
fs.getReplication(src)          // get the replication factor
fs.listCorruptFileBlocks(path)  // list corrupt file blocks
fs.listFiles(f, recursive)      // iterate files, optionally recursively
fs.open(f)                      // open a file for reading
*/
/**
 * Exploratory JUnit 4 tests for basic HDFS operations via the Hadoop
 * {@link FileSystem} API: listing, block locations, upload, download, delete.
 * Each test acquires a fresh FileSystem in {@code before()} and releases it
 * in {@code after()}.
 */
public class TestHDFS {
    Configuration conf;
    FileSystem fs;

    @Before
    public void before() throws IOException {
        // Load the configuration files found on the classpath (true = load defaults).
        conf = new Configuration(true);
        // Obtain the FileSystem handle described by that configuration.
        fs = FileSystem.get(conf);
    }

    @After
    public void after() throws IOException {
        // Release the file system handle after each test.
        fs.close();
    }

    /** Lists the entries directly under the HDFS root and prints basic metadata. */
    @Test
    public void testLs() throws Exception {
        // Path object representing the HDFS root directory.
        Path path = new Path("/");
        FileStatus[] fileStatus = fs.listStatus(path);
        for (FileStatus fstatus : fileStatus) {
            if (fstatus.isDirectory()) {
                System.out.println("目录");
                System.out.println(fstatus.getPath()); // fully-qualified path of the entry
                System.out.println(fstatus.getPath().getName());
                System.out.println(fstatus.getOwner());
            } else {
                System.out.println("文件");
            }
        }
    }

    /** Prints the length and datanode host names of each block of a file. */
    @Test
    public void testBlockLocation() throws Exception {
        Path path = new Path("/user/root/hello.txt");
        // fs.getLength(path) is deprecated; getFileStatus(...).getLen() is the replacement.
        long length = fs.getFileStatus(path).getLen();
        // Block locations across the datanodes for the whole file.
        BlockLocation[] blockLocations = fs.getFileBlockLocations(path, 0, length);
        for (BlockLocation bl : blockLocations) {
            // Arrays.toString: getNames() returns String[]; printing it raw would
            // only show the array's identity hash, not the host:port entries.
            System.out.println(bl.getLength() + "\n" + Arrays.toString(bl.getNames()));
        }
    }

    /** Upload: copies a local file into HDFS with a manual buffer loop. */
    @Test
    public void testUpload() throws Exception {
        // try-with-resources: both streams are closed even if read/write throws,
        // and FSDataOutputStream.close() flushes, so no explicit flush() is needed.
        try (FSDataOutputStream os = fs.create(new Path("/user/root/newfile.txt"));
             FileInputStream fis = new FileInputStream("D:\\test\\22.txt")) {
            byte[] buf = new byte[1024];
            int len;
            while ((len = fis.read(buf)) != -1) {
                os.write(buf, 0, len);
            }
            // Report once after the whole file is copied (the original printed
            // this for every 1 KB chunk, which was misleading).
            System.out.println("success");
        }
    }

    /** Upload, variant two: delegates the copy loop to Hadoop's IOUtils. */
    @Test
    public void testUpload1() throws IOException {
        try (FSDataOutputStream os = fs.create(new Path("/user/root/newfile2.txt"));
             FileInputStream fis = new FileInputStream("D:\\test\\22.txt")) {
            // Buffer size is taken from io.file.buffer.size in conf.
            IOUtils.copyBytes(fis, os, conf);
        }
    }

    /** Download: copies an HDFS file to the local file system. */
    @Test
    public void testdownlowd() throws IOException {
        // delSrc = false (keep the HDFS copy), useRawLocalFileSystem = true
        // (skip the client-side .crc checksum file).
        fs.copyToLocalFile(false, new Path("/user/root/newfile.txt"),
                new Path("D:/test/hello.txt"), true);
    }

    /** Delete: removes a file from HDFS (recursive = true). */
    @Test
    public void testdelte() throws IOException {
        fs.delete(new Path("hdfs://mycluster/user/root/test.txt"), true);
    }
}