HDFS:在集群上下载文件到本地(windows)
package com.rzhao;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Before;
import org.junit.After;
import org.junit.Test;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
public class HdfsTest {

    // Shared FileSystem handle; initialized in setup() before every test.
    FileSystem fs;

    /**
     * Runs before each {@code @Test} method: connects to the HDFS NameNode
     * at 192.168.2.161:9000 as user "root".
     *
     * @throws IOException          if the connection to HDFS fails
     * @throws URISyntaxException   if the NameNode URI is malformed
     * @throws InterruptedException if the connection attempt is interrupted
     */
    @Before
    public void setup() throws IOException, URISyntaxException, InterruptedException {
        // Connection parameters for the NameNode.
        Configuration configuration = new Configuration();
        // Alternative: set "fs.defaultFS" (capital S — the original commented-out
        // key "fs.defaultFs" is wrong and would be silently ignored) plus the
        // HADOOP_USER_NAME system property, then call FileSystem.get(configuration).
        fs = FileSystem.get(new URI("hdfs://192.168.2.161:9000"), configuration, "root");
    }

    // Delete a directory on HDFS.
    @Test
    public void testDelete() throws IOException {
        Path dir = new Path("/test1");
        // delete(Path) is deprecated; pass recursive=true explicitly so a
        // non-empty directory is removed as intended.
        fs.delete(dir, true);
    }

    // Create a directory on HDFS.
    @Test
    public void testMkdir() throws IOException {
        // NOTE(review): a relative path resolves under the user's HDFS home
        // directory (i.e. /user/root/idea) — use "/idea" for a root-level dir.
        Path dir1 = new Path("idea");
        fs.mkdirs(dir1);
    }

    // Download a file from HDFS to the local (Windows) filesystem.
    @Test
    public void testDown() throws IOException {
        Path hdfsPath = new Path("/input/word.txt");
        Path localPath = new Path("E:/output/a.txt");
        fs.copyToLocalFile(hdfsPath, localPath);
    }

    // Upload a local (Windows) file to HDFS.
    @Test
    public void testUploding() throws IOException {
        // Renamed locals: the originals ("dhp_ph"/"win_ph") had the
        // source/destination meanings swapped, which was misleading.
        Path localPath = new Path("E:/input/student.txt");
        Path hdfsPath = new Path("/input");
        fs.copyFromLocalFile(localPath, hdfsPath);
    }

    // Runs after each @Test method: release the FileSystem connection.
    @After
    public void teardown() throws IOException {
        // Guard against an NPE (which would mask the real failure) when
        // setup() threw before fs was assigned.
        if (fs != null) {
            fs.close();
        }
    }
}
(前提是在idea编辑器中编写代码)这里就是简单的在集群上下载、上传、创建、删除
出现错误时的解决办法:
1.下载hadoop-2.7.2-win10.zip(可以放到自己的目录)我这台是在D:\programExe下的
2.解压
3.添加环境变量
HADOOP_HOME = D:\programExe\hadoop2.7.2 (注意:变量值不要带引号)
path =D:\programExe\hadoop2.7.2\bin
path =D:\programExe\hadoop2.7.2\sbin
在环境变量中配置两个path和HADOOP_HOME
4.将bin目录下的hadoop.dll和 winutils.exe 分别复制到C:\Windows\System32目录下
5.然后在idea里面试试看可以下载不