一、首先自然是导包
$HADOOP_HOME/share/hadoop/common/*.jar
$HADOOP_HOME/share/hadoop/common/lib/*.jar
$HADOOP_HOME/share/hadoop/hdfs/*.jar
$HADOOP_HOME/share/hadoop/hdfs/lib/*.jar
二、代码如下
package com.stu.hdfs;
/**
*
* @author ysw28
* HDFS的API操作
*/
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;
public class TestDemo319 {
@Test
// Create a directory on HDFS.
public void mkDir() {
    // Connect to HDFS with a replication factor of 1 (single-node setup).
    Configuration conf = new Configuration();
    conf.set("dfs.replication", "1");
    // try-with-resources guarantees the client is closed even if mkdirs throws —
    // the original only closed it on the success path.
    try (FileSystem client = FileSystem.get(new URI("hdfs://HP110:9000"), conf, "root")) {
        // Create the target directory (parents are created as needed).
        Path path = new Path("/Demo319");
        client.mkdirs(path);
        System.out.println("创建目录成功");
    } catch (Exception e) {
        e.printStackTrace();
        System.out.println("创建目录失败");
    }
}
@Test
// Delete a directory on HDFS (deleting a file works the same way).
public void delDir() {
    // Connect to HDFS.
    Configuration conf = new Configuration();
    // try-with-resources closes the client on every path — the original leaked it
    // both when the path did not exist and when an exception was thrown.
    try (FileSystem client = FileSystem.get(new URI("hdfs://HP110:9000"), conf, "root")) {
        Path path = new Path("/Demo319");
        if (client.exists(path)) {
            // Second argument true = recursive delete (directory plus contents).
            client.delete(path, true);
            System.out.println("删除目录成功");
        } else {
            System.out.println("没有这个文件");
        }
    } catch (Exception e) {
        e.printStackTrace();
        System.out.println("删除目录失败");
    }
}
@Test
// 上传文件
public void copyFromLocal() {
try {
// 连接HDFS,指定副本数
Configuration conf = new Configuration();
conf