- 免密登录
生成公钥私钥 3次按回车
[hadoop@zjgm01 ~]$ ssh-keygen -t rsa
查看密钥内容（只查看，不要修改）
vi /home/hadoop/.ssh/id_rsa （注意：公钥在 id_rsa.pub，id_rsa 是私钥，切勿泄露或改动）
将公钥复制到目标主机（ssh-copy-id 会把公钥追加到对方的 authorized_keys），密码是 hadoop
[hadoop@zjgm01 ~]$ ssh-copy-id zjgm01
ssh zjgm01 实现免密登录
查看当前防火墙状态
[hadoop@zjgm01 ~]$ sudo chkconfig iptables --list
iptables 0:off 1:off 2:on 3:on 4:on 5:on 6:off
关闭
[hadoop@zjgm01 ~]$ sudo chkconfig iptables off
查看
[hadoop@zjgm01 ~]$ sudo chkconfig iptables --list
iptables 0:off 1:off 2:off 3:off 4:off 5:off 6:off
[hadoop@zjgm01 ~]$
/home/hadoop/app/hadoop-2.4.1/data/dfs/data/current/BP-230894053-192.168.2.100-1573028182527
新建 hdfs02 工程（以下为其中的 HdfsUtil 类，演示上传文件到 HDFS）
package com.hdfs;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.FileInputStream;
import java.io.IOException;
public class HdfsUtil {
public static void main(String[] args) throws IOException {
//new配置文件
Configuration conf=new Configuration();
conf.set(“fs.defaultFS”,“hdfs://192.168.2.100:9000”);
//文件上传
FileSystem fs=FileSystem.get(conf);
//上传到哪里
Path path=new Path(“hdfs://192.168.2.100:9000/wms.txt”);
FSDataOutputStream os=fs.create(path);
FileInputStream is=new FileInputStream("C:/Users/19668/Desktop/1.txt");
IOUtils.copy(is,os);
}
}