import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.util.Arrays;
import java.util.Date;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Before;
import org.junit.Test;
/**
 * Manual/integration tests demonstrating basic usage of the HDFS Java client API.
 *
 * <p>NOTE(review): every test talks to a live cluster at {@code hdfs://j01:9000/} as user
 * "root" — these are not unit tests and require that NameNode to be reachable.
 */
public class HdfsClient {
    /** HDFS client handle; re-created before each test by {@link #init()}. */
    public FileSystem fs = null;

    /**
     * Creates the HDFS client connection before each test.
     *
     * @throws Exception if the URI is malformed or the NameNode is unreachable
     */
    @Before
    public void init() throws Exception {
        Configuration con = new Configuration();
        fs = FileSystem.get(new URI("hdfs://j01:9000/"), con, "root");
    }

    /**
     * Creates a directory in HDFS.
     *
     * @throws Exception on any HDFS failure
     */
    @Test
    public void mkdirTest() throws Exception {
        boolean b = fs.mkdirs(new Path("/eclipse"));
        System.out.println(b);
        fs.close(); // added for consistency: every other test releases the client
    }

    /**
     * Uploads a local file to HDFS.
     *
     * @throws Exception on any HDFS failure
     */
    @Test
    public void testPut() throws Exception {
        fs.copyFromLocalFile(new Path("F:\\worcount.txt"), new Path("/eclipse/"));
        fs.close();
    }

    /**
     * Downloads a file from HDFS to the local filesystem.
     *
     * @throws Exception on any HDFS failure
     */
    @Test
    public void testGet() throws Exception {
        fs.copyToLocalFile(new Path("/t1.txt"), new Path("E:\\t1.txt"));
        fs.close();
    }

    /**
     * Moves/renames a file inside HDFS.
     *
     * @throws Exception on any HDFS failure
     */
    @Test
    public void testMove() throws Exception {
        boolean b = fs.rename(new Path("/t2.txt"), new Path("/eclipse"));
        System.out.println(b);
        fs.close();
    }

    /**
     * Deletes a file or directory in HDFS.
     *
     * @throws Exception on any HDFS failure
     */
    @Test
    public void testDel() throws Exception {
        // Second argument: true = recursive delete (required for non-empty
        // directories), false = non-recursive.
        boolean b = fs.delete(new Path("/worcount.txt"), false);
        System.out.println(b);
        fs.close();
    }

    /**
     * Lists directory contents; {@code listFiles} yields file entries only (no directories).
     *
     * @throws Exception on any HDFS failure
     */
    @Test
    public void testLs() throws Exception {
        // Second argument true = recurse into subdirectories.
        RemoteIterator<LocatedFileStatus> lf = fs.listFiles(new Path("/eclipse"), true);
        while (lf.hasNext()) {
            LocatedFileStatus next = lf.next();
            System.out.println("文件的全路径:" + next.getPath());
            // java.time.Instant instead of the legacy java.util.Date wrapper.
            System.out.println("最近访问时间:" + Instant.ofEpochMilli(next.getAccessTime()));
            System.out.println("文件块的大小:" + next.getBlockSize());
            System.out.println("文件的大小:" + next.getLen());
            System.out.println("最后修改时间:" + Instant.ofEpochMilli(next.getModificationTime()));
            System.out.println("文件块的存放位置:" + Arrays.toString(next.getBlockLocations()));
            System.out.println("----------块的信息----------------------");
            BlockLocation[] bl = next.getBlockLocations();
            for (BlockLocation block : bl) {
                System.out.println("本块的大小:" + block.getLength());
                System.out.println("本块在整个文件中的起始偏移量:" + block.getOffset());
                System.out.println("本块的副本所在datanode主机" + Arrays.toString(block.getHosts()));
            }
            System.out.println("----------块的信息----------------------");
            System.out.println("文件所属组:" + next.getGroup());
            System.out.println("文件的所属者:" + next.getOwner());
            System.out.println("文件的副本数量:" + next.getReplication());
        }
        fs.close();
    }

    /**
     * Lists immediate children of a directory; unlike {@code listFiles} this also
     * returns directory entries.
     *
     * @throws Exception on any HDFS failure
     */
    @Test
    public void testLs2() throws Exception {
        // Many accessors mirror those shown in testLs(); not all are repeated here.
        FileStatus[] ls = fs.listStatus(new Path("/"));
        for (FileStatus status : ls) {
            System.out.println(status.isDirectory() ? "文件夹" : "文件");
            System.out.println("文件夹或者文件的全路径:" + status.getPath());
            System.out.println("**************************************************");
        }
        fs.close();
    }

    /**
     * Reads an HDFS file line by line and prints it.
     *
     * @throws Exception on any HDFS failure
     */
    @Test
    public void testReadFile() throws Exception {
        // try-with-resources closes the reader before the underlying stream
        // (reverse declaration order) and guarantees cleanup on exceptions —
        // the original closed fis before br and leaked both on failure.
        // Charset pinned to UTF-8 rather than the platform default.
        try (FSDataInputStream fis = fs.open(new Path("/worcount.txt"));
             BufferedReader br = new BufferedReader(new InputStreamReader(fis, StandardCharsets.UTF_8))) {
            String line;
            while ((line = br.readLine()) != null) {
                System.out.println(line);
            }
        }
        fs.close();
    }

    /**
     * Random-access read: seek to an arbitrary offset and read a fixed number of bytes.
     *
     * @throws Exception on any HDFS failure, or EOFException if fewer than 10 bytes remain
     */
    @Test
    public void testRandomReadFile() throws Exception {
        try (FSDataInputStream fis = fs.open(new Path("/eclipse/friends.txt"))) {
            fis.seek(5);
            byte[] b = new byte[10];
            // read(b) may legally return fewer than b.length bytes; readFully
            // loops until the buffer is filled (or throws EOFException).
            fis.readFully(b);
            System.out.println(new String(b, StandardCharsets.UTF_8));
        }
        fs.close();
    }

    /**
     * Appends content to an existing HDFS file.
     *
     * @throws Exception on any HDFS failure (append must be enabled on the cluster)
     */
    @Test
    public void testWriteFile() throws Exception {
        // BUG FIX: the original closed fos BEFORE bw.close(), so the
        // BufferedWriter flushed its buffer into an already-closed stream and
        // the write was lost. try-with-resources closes bw first, flushing
        // correctly, then fos.
        try (FSDataOutputStream fos = fs.append(new Path("/t1.txt"));
             BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fos, StandardCharsets.UTF_8))) {
            bw.write("写入内容");
        }
        fs.close();
    }
}
// Simple usage of the HDFS Java client API — first published 2020-06-14 14:05:31