一键配置单机伪分布式Hadoop脚本
jdk要 jdk-8u241-linux-x64.rpm
Hadoop包要 hadoop-2.10.0.tar.gz
主机名 node09
ip地址 192.168.206.200
配置:
vi Hadoop.script
粘贴脚本
chmod +x Hadoop.script
. Hadoop.script
# Single-node identity used throughout this script: the hostname registered
# in /etc/hosts and the slaves file, and its static IP address.
name="node09"
ip="192.168.206.200"
# Install the Oracle JDK 8u241 RPM (expects the file in the current directory).
rpm -i jdk-8u241-linux-x64.rpm
echo '********************** jdk install successfully **********************'
# Append JAVA_HOME / HADOOP_HOME / PATH exports to /etc/profile — guarded by
# grep so a re-run of this script does not accumulate duplicate export lines.
# Single quotes keep $PATH/$JAVA_HOME/$HADOOP_HOME literal in the written file.
if ! grep -q 'JAVA_HOME=/usr/java/jdk1.8.0_241-amd64' /etc/profile; then
  echo 'export JAVA_HOME=/usr/java/jdk1.8.0_241-amd64' >> /etc/profile
  echo 'export HADOOP_HOME=/opt/sxt/hadoop-2.10.0' >> /etc/profile
  echo 'export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/sbin:$HADOOP_HOME/bin' >> /etc/profile
fi
# The script is sourced (". Hadoop.script"), so this updates the current shell.
source /etc/profile
echo '********************** profile successfully **********************'
# Map the hostname to its static IP. The original used '>' with only one line,
# which clobbered /etc/hosts and removed the loopback entries, breaking
# localhost name resolution — rebuild the file with localhost kept intact.
{
  echo '127.0.0.1   localhost localhost.localdomain'
  echo '::1         localhost localhost.localdomain'
  echo "$ip $name"
} > /etc/hosts
echo '********************** hosts successfully **********************'
# Passwordless SSH to self is required by start-dfs.sh. Generate the key pair
# only if absent (otherwise ssh-keygen prompts about overwriting), authorize
# it only once, and ensure the permissions sshd insists on.
[ -f /root/.ssh/id_dsa ] || ssh-keygen -t dsa -P '' -f /root/.ssh/id_dsa
grep -qF -f /root/.ssh/id_dsa.pub ~/.ssh/authorized_keys 2>/dev/null || \
  cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys
chmod 600 ~/.ssh/authorized_keys
echo '********************** ssh successfully **********************'
# Unpack Hadoop and install it under /opt/sxt. Idempotent: 'mkdir -p' does
# not fail on an existing directory (plain 'mkdir' did), and the extract/move
# is skipped when a previous run already installed it ('mv' would otherwise
# nest a second copy inside the existing directory).
mkdir -p /opt/sxt
if [ ! -d /opt/sxt/hadoop-2.10.0 ]; then
  tar xf hadoop-2.10.0.tar.gz
  mv hadoop-2.10.0 /opt/sxt/
fi
echo '********************** hadoop install successfully **********************'
# Pin JAVA_HOME in each Hadoop env script. The original replaced hard-coded
# line numbers ('25c', '16c', '23c'), which silently corrupts the files if a
# different Hadoop release shifts lines — match the assignment by pattern
# instead. The pattern also uncomments a commented-out default.
hadoop_conf=/opt/sxt/hadoop-2.10.0/etc/hadoop
java_home=/usr/java/jdk1.8.0_241-amd64
for env_file in hadoop-env.sh mapred-env.sh yarn-env.sh; do
  sed -i "s|^#\? *export JAVA_HOME=.*|export JAVA_HOME=$java_home|" "$hadoop_conf/$env_file"
  echo "********************** $env_file successfully **********************"
done
# Rewrite core-site.xml: drop the stock empty <configuration></configuration>
# (the file's last two lines), then append the pseudo-distributed settings —
# NameNode RPC endpoint and the local data directory — in one heredoc.
core_site=/opt/sxt/hadoop-2.10.0/etc/hadoop/core-site.xml
sed -i '$d' "$core_site"
sed -i '$d' "$core_site"
cat >> "$core_site" <<EOF
<configuration>
 <property>
 <name>fs.defaultFS</name>
 <value>hdfs://$name:9000</value>
 </property>
 <property>
 <name>hadoop.tmp.dir</name>
 <value>/var/sxt/hadoop/local</value>
 </property>
</configuration>
EOF
echo '********************** core-site.xml successfully **********************'
# Rewrite hdfs-site.xml: strip the stock trailing <configuration> stanza
# (last three lines), then append single-node settings — replication factor 1
# and the SecondaryNameNode HTTP address — in one heredoc.
hdfs_site=/opt/sxt/hadoop-2.10.0/etc/hadoop/hdfs-site.xml
sed -i '$d' "$hdfs_site"
sed -i '$d' "$hdfs_site"
sed -i '$d' "$hdfs_site"
cat >> "$hdfs_site" <<EOF
<configuration>
 <property>
 <name>dfs.replication</name>
 <value>1</value>
 </property>
 <property>
 <name>dfs.namenode.secondary.http-address</name>
 <value>$name:50090</value>
 </property>
</configuration>
EOF
echo '********************** hdfs-site.xml successfully **********************'
# Register this node as its own DataNode (single-node cluster).
echo "$name" > /opt/sxt/hadoop-2.10.0/etc/hadoop/slaves
echo '********************** slaves successfully **********************'
# Format the NameNode only on first run. The original formatted
# unconditionally, which on a re-run prompts interactively and, if confirmed,
# wipes the existing filesystem metadata under hadoop.tmp.dir
# (/var/sxt/hadoop/local, as set in core-site.xml).
if [ ! -d /var/sxt/hadoop/local/dfs/name/current ]; then
  hdfs namenode -format
  echo '********************** format successfully **********************'
fi
start-dfs.sh
echo '********************** start-dfs.sh successfully **********************'