package org.hjc.hdfs;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class HdfsTest {

    /** Default local file to upload when no CLI arguments are given. */
    private static final String DEFAULT_SRC = "G:\\学习笔记\\hadoop\\HDFS_HA.java";

    /** Default HDFS destination path. */
    private static final String DEFAULT_DST = "/hadoop";

    /**
     * Uploads a local file to an HA-enabled HDFS cluster (logical nameservice {@code ns1}).
     *
     * <p>Usage: {@code HdfsTest [localSrcPath [hdfsDstPath]]} — falls back to the original
     * hard-coded paths when arguments are omitted, so existing invocations are unaffected.
     *
     * @param args optional: args[0] = local source path, args[1] = HDFS destination path
     * @throws Exception if the NameNodes are unreachable or the copy fails
     */
    public static void main(String[] args) throws Exception {
        String src = args.length > 0 ? args[0] : DEFAULT_SRC;
        String dst = args.length > 1 ? args[1] : DEFAULT_DST;

        Configuration conf = new Configuration();
        // HA client configuration: one logical nameservice ("ns1") backed by two NameNodes.
        conf.set("dfs.nameservices", "ns1");
        conf.set("dfs.ha.namenodes.ns1", "nn1,nn2");
        conf.set("dfs.namenode.rpc-address.ns1.nn1", "mini1:9000");
        conf.set("dfs.namenode.rpc-address.ns1.nn2", "mini2:9000");
        // Client-side proxy provider that fails over between nn1 and nn2 automatically.
        conf.set("dfs.client.failover.proxy.provider.ns1",
                "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        conf.set("fs.defaultFS", "hdfs://ns1");

        // try-with-resources guarantees the FileSystem handle is closed even when the
        // copy throws (the original leaked the connection on failure).
        try (FileSystem fs = FileSystem.get(new URI("hdfs://ns1"), conf, "hadoop")) {
            fs.copyFromLocalFile(new Path(src), new Path(dst));
        }
    }
}
// Source article: "Hadoop: submitting HDFS files to an HA cluster"
// (latest recommended follow-up article published 2020-10-30 13:12:53)