8.hadoop脚本分析
hadoop脚本在../hadoop/sbin目录下,也可以通过which cmd查看脚本路径
1.start-all.sh
#!/usr/bin/env bash
# Deprecated wrapper: delegates to start-dfs.sh and start-yarn.sh.
echo "This script is Deprecated. Instead use start-dfs.sh and start-yarn.sh"

# Resolve the directory containing this script: BASH_SOURCE if available,
# otherwise fall back to $0 (the invoked path).
bin=`dirname "${BASH_SOURCE-$0}"`
bin=`cd "$bin"; pwd`                            # canonicalize to an absolute path

DEFAULT_LIBEXEC_DIR="$bin"/../libexec           # sibling libexec directory
# ${VAR:-default} substitution: keep HADOOP_LIBEXEC_DIR if already set,
# otherwise use the default computed above.
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
. $HADOOP_LIBEXEC_DIR/hadoop-config.sh          # source hadoop-config.sh to set env vars

# start hdfs daemons if hdfs is present
if [ -f "${HADOOP_HDFS_HOME}"/sbin/start-dfs.sh ]; then
  # launch HDFS via ./sbin/start-dfs.sh
  "${HADOOP_HDFS_HOME}"/sbin/start-dfs.sh --config $HADOOP_CONF_DIR
fi

# start yarn daemons if yarn is present
if [ -f "${HADOOP_YARN_HOME}"/sbin/start-yarn.sh ]; then
  # launch YARN via ./sbin/start-yarn.sh
  "${HADOOP_YARN_HOME}"/sbin/start-yarn.sh --config $HADOOP_CONF_DIR
fi
2.hadoop-config.sh
设置各种环境变量
HADOOP_CONF_DIR
HEAP_SIZE=1000m
CLASSPATH
3.start-dfs.sh --config $HADOOP_CONF_DIR //启动hdfs
1.libexec/hdfs-config.sh //设置环境变量
2.hdfs getconf -namenodes //获取namenode主机名
3."./sbin/hadoop-daemons.sh" \ //启动namenode
--config "$HADOOP_CONF_DIR" \
--hostnames "$NAMENODES" \
--script "$bin/hdfs" start namenode $nameStartOpt
4."./sbin/hadoop-daemons.sh" \ //启动datanode
--config "$HADOOP_CONF_DIR" \
--script "$bin/hdfs" start datanode $dataStartOpt
5."./sbin/hadoop-daemons.sh" \ //启动secondarynamenode
--config "$HADOOP_CONF_DIR" \
--hostnames "$SECONDARY_NAMENODES" \
--script "$bin/hdfs" start secondarynamenode
4.hdfs-config.sh
最终还是会调用 hadoop-config.sh
5../