package com.billstudy.hdfs.test;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* Test hdfs operation (CRUD)
* @author Bill
* @since V1.0 2015年3月22日 - 上午9:45:40
*/
/**
 * Test hdfs operation (CRUD).
 * @author Bill
 * @since V1.0 2015-03-22 09:45:40
 */
public class HadoopJunitTest {

    private final String HDFS_BASE_PATH = "hdfs://h21:9000/";
    private FileSystem fs = null;
    private final Configuration configuration = new Configuration();

    static {
        // URL.setURLStreamHandlerFactory may be invoked at most once per JVM.
        // Calling it from @Before (as before) throws java.lang.Error on the
        // second test of the class, so register the hdfs:// protocol handler
        // exactly once in a static initializer.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    @Before
    public void before() throws Exception {
        // Create (or fetch the cached) file system for the base uri and conf.
        fs = FileSystem.get(new URI(HDFS_BASE_PATH), configuration);
    }

    @After
    public void after() throws IOException {
        // Close once per test here instead of inside each test method:
        // FileSystem.get returns a cached instance, so closing it mid-suite
        // would hand later tests an already-closed file system.
        if (fs != null) {
            fs.close();
        }
    }

    /** Creates /hey if it does not already exist. */
    @Test
    public void testMkdir() throws IOException, URISyntaxException {
        final Path path = new Path("/hey");
        boolean exists = fs.exists(path);
        System.out.println(path.getName() + "-exists:" + exists);
        if (!exists) {
            fs.mkdirs(path);
        }
    }

    /** Uploads a small text file, overwriting any existing copy. */
    @Test
    public void testPut() throws Exception {
        FSDataOutputStream outStream = fs.create(new Path("/hey/hi-hadoop.txt"), true);
        // copyBytes(in, out, conf, true): close=true closes both streams once
        // the copy succeeds. Use an explicit charset for the payload instead
        // of the platform default (identical bytes for this ASCII string).
        IOUtils.copyBytes(
                new ByteArrayInputStream("hello hadoop ~".getBytes(StandardCharsets.UTF_8)),
                outStream,
                configuration,
                true);
    }

    /** Reads the uploaded file and prints its contents to the console. */
    @Test
    public void testGet() throws Exception {
        FSDataInputStream inStream = fs.open(new Path("/hey/hi-hadoop.txt"));
        // close=true closes the input stream after the copy completes.
        IOUtils.copyBytes(inStream, System.out, configuration, true);
    }

    /** Lists the children of the root directory with their metadata. */
    @Test
    public void testListFile() throws Exception {
        FileStatus[] listStatus = fs.listStatus(new Path("/"));
        for (FileStatus f : listStatus) {
            // isDirectory() replaces the deprecated isDir().
            System.out.println(
                    (f.isDirectory() ? "dir" : "file") + "\t"
                    + f.getAccessTime() + "\t"
                    + f.getBlockSize() + "\t"
                    + f.getGroup() + "\t"
                    + f.getLen() + "\t"
                    + f.getModificationTime() + "\t"
                    + f.getReplication() + "\t"
                    + f.getPermission() + "\t"
                    + f.getPath().getName() + "\t");
        }
    }

    /** Marks /hey for recursive deletion when the file system closes. */
    @Test
    public void testDelete() throws IOException {
        Path path = new Path("/hey");
        // deleteOnExit removes the path when fs.close() runs (in @After),
        // like a deferred "hadoop fs -rmr". For an immediate recursive
        // delete use fs.delete(path, true) instead.
        fs.deleteOnExit(path);
    }
}
// hadoop - hdfs base operation (Java API)
// Latest recommended article published 2022-04-14 11:06:00