import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;
import junit.framework.TestCase;
/**
 * Smoke tests for basic HDFS filesystem operations (mkdir, create, rename,
 * upload, list, block-location lookup) against a live cluster.
 *
 * NOTE(review): the class both extends the JUnit 3 {@code TestCase} and uses
 * JUnit 4 {@code @Test} annotations. Under a JUnit 3 runner the annotations
 * are ignored and methods are discovered by the {@code test} name prefix;
 * both conventions are satisfied here, so behavior is unchanged either way.
 */
public class TestHDFS extends TestCase {
    /** NameNode endpoint all tests connect to. Adjust to match the target cluster. */
    public static String hdfsUrl = "hdfs://192.168.1.106:8020";

    /**
     * Opens a FileSystem client bound to {@link #hdfsUrl} with a default
     * Configuration. Centralizes the boilerplate previously repeated in
     * every test method.
     *
     * @return a (possibly cached) FileSystem handle for the cluster
     * @throws Exception if the connection cannot be established
     */
    private FileSystem openFileSystem() throws Exception {
        Configuration conf = new Configuration();
        // FileSystem.get caches instances per URI+conf, so we deliberately
        // do not close the handle in the individual tests.
        return FileSystem.get(URI.create(hdfsUrl), conf);
    }

    /** Creates the HDFS folder {@code /test}. */
    @Test
    public void testHDFSMkdir() throws Exception {
        FileSystem fs = openFileSystem();
        fs.mkdirs(new Path("/test"));
    }

    /** Creates {@code /test/a.txt} and writes a short payload into it. */
    @Test
    public void testCreateFile() throws Exception {
        FileSystem fs = openFileSystem();
        Path path = new Path("/test/a.txt");
        FSDataOutputStream out = fs.create(path);
        try {
            out.write("hello hadoop".getBytes());
        } finally {
            // BUG FIX: the stream was never closed, so the write might never
            // be flushed to HDFS and the lease on the file was leaked.
            out.close();
        }
    }

    /** Renames {@code /test/a.txt} to {@code /test/b.txt} and prints the result. */
    @Test
    public void testRenameFile() throws Exception {
        FileSystem fs = openFileSystem();
        Path path = new Path("/test/a.txt");
        Path newPath = new Path("/test/b.txt");
        System.out.println(fs.rename(path, newPath));
    }

    /** Uploads a local file into {@code /test} via the high-level copy API. */
    @Test
    public void testUploadLocalFile1() throws Exception {
        FileSystem fs = openFileSystem();
        Path src = new Path("/home/hadoop/hadoop-1.2.1/bin/rcc");
        Path dst = new Path("/test");
        fs.copyFromLocalFile(src, dst);
    }

    /** Uploads a local file by hand-copying its bytes into an HDFS stream. */
    @Test
    public void testUploadLocalFile2() throws Exception {
        FileSystem fs = openFileSystem();
        InputStream in = new BufferedInputStream(
                new FileInputStream(new File("/home/hadoop/hadoop-1.2.1/bin/rcc")));
        FSDataOutputStream out = fs.create(new Path("/test/rcc1"));
        // BUG FIX: both streams were leaked; the 4-arg overload closes
        // them when the copy finishes (or fails).
        IOUtils.copyBytes(in, out, 4096, true);
    }

    /** Lists the entries directly under {@code /test}. */
    @Test
    public void testListFIles() throws Exception {
        FileSystem fs = openFileSystem();
        Path dst = new Path("/test");
        FileStatus[] files = fs.listStatus(dst);
        for (FileStatus file : files) {
            System.out.println(file.getPath().toString());
        }
    }

    /** Prints the datanode hosts holding each block of {@code /test/rcc}. */
    @Test
    public void testGetBlockInfo() throws Exception {
        FileSystem fs = openFileSystem();
        Path dst = new Path("/test/rcc");
        FileStatus fileStatus = fs.getFileStatus(dst);
        // Look up the block locations spanning the whole file.
        BlockLocation[] blkloc = fs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
        for (BlockLocation loc : blkloc) {
            // Hoisted out of the loop condition: getHosts() was called on
            // every iteration of the inner loop.
            String[] hosts = loc.getHosts();
            for (int i = 0; i < hosts.length; i++) {
                System.out.println(hosts[i]);
            }
        }
    }
}