scp: makes an exact copy, transferring the whole file every time
scp -r etc/hadoop/dfs.hosts root@192.168.121.134:/usr/local/hadoop/hadoop-2.7.6/etc/hadoop/
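scp also works in the pull direction (remote source, local destination); a minimal sketch, assuming the same host and paths as above:
scp -r root@192.168.121.134:/usr/local/hadoop/hadoop-2.7.6/etc/hadoop/dfs.hosts ./etc/hadoop/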
rsync: copies only the files that differ (incremental sync)
rsync -rvl etc/hadoop/hdfs-site.xml root@192.168.121.136:/usr/local/hadoop/hadoop-2.7.6/etc/hadoop/
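To see the incremental behavior, run the same transfer twice with -a (archive mode preserves timestamps, so rsync's size-and-mtime quick check can skip unchanged files); a sketch assuming the same host and path as above:
rsync -av etc/hadoop/hdfs-site.xml root@192.168.121.136:/usr/local/hadoop/hadoop-2.7.6/etc/hadoop/
# Run it a second time: the file is skipped because its size and mtime match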
xsync: loops over all nodes and copies a file into the same directory on each of them
#!/bin/bash
# 1 Get the number of arguments; exit if none are given
pcount=$#
if ((pcount==0)); then
  echo no args;
  exit;
fi

# 2 Get the file name
p1=$1
fname=$(basename $p1)
echo fname=$fname

# 3 Get the absolute path of the parent directory
pdir=$(cd -P $(dirname $p1); pwd)
echo pdir=$pdir

# 4 Get the current user name
user=$(whoami)

# 5 Loop over the target nodes and rsync the file to each one
for ((host=103; host<105; host++)); do
  echo ------------------- hadoop$host -------------------
  rsync -rvl $pdir/$fname $user@hadoop$host:$pdir
done
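To use the script, save it somewhere on the PATH (for example ~/bin/xsync, a hypothetical location), make it executable, and pass it the file to distribute; a sketch:
chmod +x ~/bin/xsync
xsync etc/hadoop/hdfs-site.xml
Note the script assumes the hostnames hadoop103 and hadoop104 resolve on each machine (for example via /etc/hosts entries mapping them to IPs like the 192.168.121.x addresses used above).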