第一:启动zookeeper
# ZooKeeper: stop/start this quorum member, then check its state.
zkServer.sh stop
zkServer.sh start
# Prints whether this node is running and its role (leader/follower).
zkServer.sh status
第二:启动HDFS
# HDFS: cluster-wide start/stop helper scripts.
start-dfs.sh
stop-dfs.sh
# Per-daemon start commands (run each on the appropriate host).
hadoop-daemon.sh start namenode
hadoop-daemon.sh start datanode
hadoop-daemon.sh start zkfc
hadoop-daemon.sh start journalnode
# NOTE(review): secondarynamenode is normally not run together with an HA
# setup (zkfc/journalnode) — confirm which deployment this line targets.
hadoop-daemon.sh start secondarynamenode
# Manually force nn1 active (--forcemanual bypasses the failover
# controller), then query both NameNodes' HA state and safemode status.
hdfs haadmin -transitionToActive --forcemanual nn1
hdfs haadmin -getServiceState nn1
hdfs haadmin -getServiceState nn2
hdfs dfsadmin -safemode get
第三:启动YARN
# YARN: cluster-wide start/stop helper scripts.
start-yarn.sh
stop-yarn.sh
# Per-daemon start commands.
yarn-daemon.sh start resourcemanager
yarn-daemon.sh start nodemanager
# Query ResourceManager HA state, then manually force rm1 active.
yarn rmadmin -getServiceState rm1
yarn rmadmin -getServiceState rm2
yarn rmadmin -transitionToActive --forcemanual rm1
第四:启动MapReduce历史服务器
# MapReduce JobHistory server: start / stop.
mr-jobhistory-daemon.sh start historyserver
mr-jobhistory-daemon.sh stop historyserver
第五:启动hbase
# HBase: cluster-wide stop/start, plus per-daemon start commands.
stop-hbase.sh
start-hbase.sh
hbase-daemon.sh start master
hbase-daemon.sh start regionserver
第六:启动Hive
# Hive: site-local startup script (presumably starts the metastore —
# TODO confirm), then HiveServer2 in the background with stdout/stderr
# split into separate log files.
sh start-hive.sh
nohup hiveserver2 1>/home/hadoop/log/hive_std.log 2>/home/hadoop/log/hive_err.log &
第七:启动spark
# Spark standalone cluster scripts (invoked by full path; Spark's sbin
# is not on PATH). start/stop-all covers master + workers; the rest
# manage the master and the worker (slave) daemons separately.
/home/hadoop/apps/spark-2.3.0-bin-hadoop2.7/sbin/start-all.sh
/home/hadoop/apps/spark-2.3.0-bin-hadoop2.7/sbin/stop-all.sh
/home/hadoop/apps/spark-2.3.0-bin-hadoop2.7/sbin/start-master.sh
/home/hadoop/apps/spark-2.3.0-bin-hadoop2.7/sbin/stop-master.sh
/home/hadoop/apps/spark-2.3.0-bin-hadoop2.7/sbin/start-slaves.sh
/home/hadoop/apps/spark-2.3.0-bin-hadoop2.7/sbin/stop-slaves.sh
在hadoop02机器上启动spark的historyserver
/home/hadoop/apps/spark-2.3.0-bin-hadoop2.7/sbin/start-history-server.sh
进入shell:
# Spark shell with default settings:
$SPARK_HOME/bin/spark-shell
# Spark shell against the HA standalone masters with explicit resources.
# Fixed from the original paste: "–master" used an en-dash instead of
# "--", and the option lines had no "\" continuations (each would have
# run as a separate, failing command).
$SPARK_HOME/bin/spark-shell \
  --master spark://hadoop02:7077,hadoop04:7077 \
  --executor-memory 2G \
  --total-executor-cores 2
# Smaller variant against a single master.
~/apps/spark-2.3.0-bin-hadoop2.7/bin/spark-shell \
  --master spark://hadoop02:7077 \
  --executor-memory 512m \
  --total-executor-cores 1
spark-shell的wordCount程序:
# Prepare input for the word-count demo: create the HDFS directory and
# upload the local words.txt into it.
hadoop fs -mkdir -p /wc/input
hadoop fs -put words.txt /wc/input
sc.textFile(“hdfs://myha01/wc/input/words.txt”).flatMap(.split(" ")).map((,1)).reduceByKey(+).sortBy(_._2, false).collect
第八:启动Storm
# Storm daemons in the background, each logging under ~/log.
# (Fixed log filename typo: storm-nibus.log -> storm-nimbus.log.)
nohup $STORM_HOME/bin/storm nimbus 1>~/log/storm-nimbus.log 2>&1 &
nohup $STORM_HOME/bin/storm ui 1>~/log/storm-ui.log 2>&1 &
nohup $STORM_HOME/bin/storm supervisor 1>~/log/storm-supervisor.log 2>&1 &