HDFS: writing Java client code corresponding to the shell commands
package com.bigdata.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsPermission;
import org.junit.Test;
import java.io.IOException;
import java.net.URI;
import java.util.Arrays;
public class HDFSDemo01 {
    // Create a directory on HDFS, equivalent to the -mkdir command
    @Test
    public void test01() {
        // Impersonate the hadoop user
        System.setProperty("HADOOP_USER_NAME", "hadoop");
        // Create the Java HDFS client object FileSystem;
        // Configuration loads core-default.xml by default
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://hadoop101:9000");
        try {
            FileSystem fs = FileSystem.get(conf);
            boolean mkdirs = fs.mkdirs(new Path("/java"));
            System.out.println("mkdirs== " + mkdirs);
            fs.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * @Author liqin
     * @Description Copy a local file to HDFS
     * @Date 21:10 2020/11/15
     * @Param []
     * @return void
     **/
    @Test
    public void test02() {
        // Configuration loads core-default.xml by default
        Configuration conf = new Configuration();
        try {
            // Pass the namenode URI and user name directly instead of setting fs.defaultFS and HADOOP_USER_NAME
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:9000"), conf, "hadoop");
            fs.copyFromLocalFile(new Path("E:/code/a.txt"), new Path("/"));
            fs.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * @Author liqin
     * @Description Copy an HDFS file to the local file system (delSrc = false, so the HDFS source is kept)
     * @Date 21:10 2020/11/15
     * @Param []
     * @return void
     **/
    @Test
    public void test03() {
        Configuration conf = new Configuration();
        try {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:9000"), conf, "hadoop");
            // 4-arg overload: useRawLocalFileSystem = true writes through the raw local file system (no .crc checksum file)
            // fs.copyToLocalFile(false, new Path("/a.txt"), new Path("E:/"), true);
            fs.copyToLocalFile(false, new Path("/a.txt"), new Path("E:/"));
            fs.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * @Author liqin
     * @Description List HDFS file and directory information, equivalent to the -ls command
     * @Date 21:10 2020/11/15
     * @Param []
     * @return void
     **/
    @Test
    public void test04() {
        Configuration conf = new Configuration();
        try {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:9000"), conf, "hadoop");
            FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
            for (FileStatus fileStatus : fileStatuses) {
                System.out.println("Path = " + fileStatus.getPath().toString());
                System.out.println("Owner = " + fileStatus.getOwner());
                System.out.println("Group = " + fileStatus.getGroup());
                System.out.println("BlockSize = " + fileStatus.getBlockSize());
                System.out.println("Len = " + fileStatus.getLen());
                System.out.println("mTime = " + fileStatus.getModificationTime());
                System.out.println("rep = " + fileStatus.getReplication());
                System.out.println("dir = " + fileStatus.isDirectory());
                System.out.println("isFile = " + fileStatus.isFile());
                FsPermission per = fileStatus.getPermission();
                System.out.println(per.toString());
            }
            fs.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * @Author liqin
     * @Description List HDFS files recursively and print their block locations
     * @Date 21:10 2020/11/15
     * @Param []
     * @return void
     **/
    @Test
    public void test05() {
        Configuration conf = new Configuration();
        try {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:9000"), conf, "hadoop");
            // listFiles(path, true) iterates over files recursively and includes block locations
            RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(new Path("/"), true);
            while (iterator.hasNext()) {
                BlockLocation[] blockLocations = iterator.next().getBlockLocations();
                for (BlockLocation blockLocation : blockLocations) {
                    System.out.println("Hosts = " + Arrays.toString(blockLocation.getHosts()));
                    System.out.println("Length = " + blockLocation.getLength());
                    System.out.println("Names = " + Arrays.toString(blockLocation.getNames()));
                }
                System.out.println("-----------------------------------");
            }
            fs.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * @Author liqin
     * @Description Delete an HDFS file or directory
     * @Date 21:10 2020/11/15
     * @Param []
     * @return void
     **/
    @Test
    public void test06() {
        Configuration conf = new Configuration();
        try {
            FileSystem fs = FileSystem.get(new URI("hdfs://hadoop101:9000"), conf, "hadoop");
            boolean delete = fs.delete(new Path("/data"), true); // recursive = true
            // boolean b = fs.deleteOnExit(new Path("/data")); // only deleted when the FileSystem is closed / the program exits
            System.out.println(delete);
            fs.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
HDFSDemo02
package com.bigdata.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.FileOutputStream;
import java.io.IOException;
public class HDFSDemo02
{
    private FileSystem fs;
    private Logger log = Logger.getLogger(HDFSDemo02.class); // used via log.info()
    // Runs before each test
    @Before
    public void fileSystem() throws IOException
    {
        System.setProperty("HADOOP_USER_NAME", "hadoop");
        Configuration conf = new Configuration();
        // Point the client at the cluster; alternatively, put core-site.xml on the classpath
        conf.set("fs.defaultFS", "hdfs://hadoop101:9000");
        fs = FileSystem.get(conf);
    }

    /**
     * @Author liqin
     * @Description Download an HDFS file to the local file system using streams
     * @Date 15:17 2020/11/16
     * @Param []
     * @return void
     **/
    @Test
    public void test01() throws IOException
    {
        FSDataInputStream open = fs.open(new Path("/a.txt"));
        FileOutputStream fos = new FileOutputStream("E://a.txt");
        byte[] bytes = new byte[1024];
        int len = 0;
        while ((len = open.read(bytes)) != -1) {
            log.info(new String(bytes, 0, len));
            fos.write(bytes, 0, len);
            // System.out.println(new String(bytes, 0, len));
        }
        fos.close();
        open.close();
    }

    // Runs after each test
    @After
    public void close() throws IOException
    {
        fs.close();
    }
}
Using the Logger requires a logging configuration file on the classpath.
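A minimal log4j.properties sketch (assuming the log4j 1.x API that org.apache.log4j.Logger belongs to); placing it under src/test/resources or src/main/resources puts it on the classpath, and the pattern shown is only an example:

log4j.rootLogger=INFO, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p [%c] %m%n

With this file in place, the log.info(...) calls in HDFSDemo02 print each downloaded chunk to the console, and the Hadoop client's own log output becomes visible as well.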