Spark 2.4.5 安装部署:Standalone 模式与 On Yarn 模式

[root@master ~]# cd /usr/local/soft/
[root@master soft]# ls
0??           hive-1.2.1      redis        show
A??           jars            redis-6.2.6  sqoop-1.4.7
data          jdk1.8.0_171    scripts      test.txt
hadoop-2.7.6  packages        shell        zookeeper-3.4.6
hbase-1.4.6   phoenix-4.15.0  shell01
[root@master soft]# cd packages/
[root@master packages]# ls
apache-hive-1.2.1-bin.tar.gz
apache-phoenix-4.15.0-HBase-1.4-bin.tar.gz
hadoop-2.7.6.tar.gz
hbase-1.4.6-bin.tar.gz
redis-6.2.6.tar.gz
sqoop-1.4.7.bin__hadoop-2.6.0.tar.gz
zookeeper-3.4.6.tar.gz
[root@master packages]# rz -E
rz waiting to receive.
[root@master packages]# ls
apache-hive-1.2.1-bin.tar.gz
apache-phoenix-4.15.0-HBase-1.4-bin.tar.gz
hadoop-2.7.6.tar.gz
hbase-1.4.6-bin.tar.gz
redis-6.2.6.tar.gz
spark-2.4.5-bin-hadoop2.7.tgz
sqoop-1.4.7.bin__hadoop-2.6.0.tar.gz
zookeeper-3.4.6.tar.gz
[root@master packages]# tar -zxvf spark-2.4.5-bin-hadoop2.7.tgz  -C /usr/local/soft
[root@master packages]# cd ..
[root@master soft]# ls
0??           jdk1.8.0_171    shell01
A??           packages        show
data          phoenix-4.15.0  spark-2.4.5-bin-hadoop2.7
hadoop-2.7.6  redis           sqoop-1.4.7
hbase-1.4.6   redis-6.2.6     test.txt
hive-1.2.1    scripts         zookeeper-3.4.6
jars          shell
[root@master soft]# mv spark-2.4.5-bin-hadoop2.7 spark-2.4.5
[root@master soft]# ls
0??           hive-1.2.1      redis        show
A??           jars            redis-6.2.6  spark-2.4.5
data          jdk1.8.0_171    scripts      sqoop-1.4.7
hadoop-2.7.6  packages        shell        test.txt
hbase-1.4.6   phoenix-4.15.0  shell01      zookeeper-3.4.6
[root@master soft]# cd spark-2.4.5/
[root@master spark-2.4.5]# ls
bin   examples    LICENSE   python     RELEASE
conf  jars        licenses  R          sbin
data  kubernetes  NOTICE    README.md  yarn
[root@master spark-2.4.5]# pwd
/usr/local/soft/spark-2.4.5
[root@master spark-2.4.5]# cd bin/
[root@master bin]# ls
beeline               pyspark.cmd       spark-shell
beeline.cmd           run-example       spark-shell2.cmd
docker-image-tool.sh  run-example.cmd   spark-shell.cmd
find-spark-home       spark-class       spark-sql
find-spark-home.cmd   spark-class2.cmd  spark-sql2.cmd
load-spark-env.cmd    spark-class.cmd   spark-sql.cmd
load-spark-env.sh     sparkR            spark-submit
pyspark               sparkR2.cmd       spark-submit2.cmd
pyspark2.cmd          sparkR.cmd        spark-submit.cmd
[root@master bin]# cd ..
[root@master spark-2.4.5]# pwd
/usr/local/soft/spark-2.4.5
[root@master spark-2.4.5]# vim /etc/profile
[root@master spark-2.4.5]# source /etc/profile
[root@master spark-2.4.5]# cd conf/
[root@master conf]# ls
docker.properties.template   slaves.template
fairscheduler.xml.template   spark-defaults.conf.template
log4j.properties.template    spark-env.sh.template
metrics.properties.template
[root@master conf]# cp spark-env.sh.template spark-env.sh
[root@master conf]# cp slaves.template slaves
[root@master conf]# ls
docker.properties.template   slaves.template
fairscheduler.xml.template   spark-defaults.conf.template
log4j.properties.template    spark-env.sh
metrics.properties.template  spark-env.sh.template
slaves
[root@master conf]# vim slaves
[root@master conf]# vim spark-env.sh

export SPARK_MASTER_HOST=master   # 注:SPARK_MASTER_IP 自 Spark 2.0 起已废弃,官方文档使用 SPARK_MASTER_HOST
export SPARK_MASTER_PORT=7077
export SPARK_WORKER_CORES=2
export SPARK_WORKER_INSTANCES=1
export SPARK_WORKER_MEMORY=2g
export JAVA_HOME=/usr/local/soft/jdk1.8.0_171

cd /usr/local/soft/
scp -r spark-2.4.5 node1:`pwd`
scp -r spark-2.4.5 node2:`pwd`

[root@master soft]# cd spark-2.4.5/
[root@master spark-2.4.5]# ls
bin   examples    LICENSE   python     RELEASE
conf  jars        licenses  R          sbin
data  kubernetes  NOTICE    README.md  yarn
[root@master spark-2.4.5]# pwd
/usr/local/soft/spark-2.4.5
[root@master spark-2.4.5]# ./sbin/start-all.sh
starting org.apache.spark.deploy.master.Master, logging to /usr/local/soft/spark-2.4.5/logs/spark-root-org.apache.spark.deploy.master.Master-1-master.out
node2: starting org.apache.spark.deploy.worker.Worker, logging to /usr/local/soft/spark-2.4.5/logs/spark-root-org.apache.spark.deploy.worker.Worker-1-node2.out
node1: starting org.apache.spark.deploy.worker.Worker, logging to /usr/local/soft/spark-2.4.5/logs/spark-root-org.apache.spark.deploy.worker.Worker-1-node1.out
[root@master spark-2.4.5]# jps
56450 Master
56527 Jps

http://master:8080/

[root@master spark-2.4.5]# ls
bin   examples    LICENSE   NOTICE  README.md  yarn
conf  jars        licenses  python  RELEASE
data  kubernetes  logs      R       sbin
[root@master spark-2.4.5]# cd examples/
[root@master examples]# ls
jars  src
[root@master examples]# cd jars/
[root@master jars]# ls
scopt_2.11-3.7.0.jar  spark-examples_2.11-2.4.5.jar
[root@master jars]# pwd
/usr/local/soft/spark-2.4.5/examples/jars
[root@master jars]# spark-submit --class org.apache.spark.examples.SparkPi --master spark://master:7077 --executor-memory 512m --total-executor-cores 1 spark-examples_2.11-2.4.5.jar 100
[root@master jars]# spark-submit --class org.apache.spark.examples.SparkPi --master spark://master:7077 --executor-memory 512M --total-executor-cores 1 --deploy-mode cluster spark-examples_2.11-2.4.5.jar 100
[root@master jars]# spark-shell --master spark://master:7077

scala> sc.textFile("/usr/local/soft/data/students.txt")
res0: org.apache.spark.rdd.RDD[String] = /usr/local/soft/data/students.txt MapPartitionsRDD[1] at textFile at <console>:25

scala> val stuRDD = sc.textFile("/usr/local/soft/data/students.txt")
stuRDD: org.apache.spark.rdd.RDD[String] = /usr/local/soft/data/students.txt MapPartitionsRDD[3] at textFile at <console>:24

scala> stuRDD.take(10).foreach(println)
1500100001,施笑槐,22,女,文科六班
1500100002,吕金鹏,24,男,文科六班
1500100003,单乐蕊,22,女,理科六班
1500100004,葛德曜,24,男,理科三班
1500100005,宣谷芹,22,女,理科五班
1500100006,边昂雄,21,男,理科二班
1500100007,尚孤风,23,女,文科六班
1500100008,符半双,22,女,理科六班
1500100009,沈德昌,21,男,理科一班
1500100010,羿彦昌,23,男,理科六班
[root@master jars]# cd ..
[root@master examples]# ls
jars  src
[root@master examples]# cd ..
[root@master spark-2.4.5]# ls
bin   examples    LICENSE   NOTICE  README.md  yarn
conf  jars        licenses  python  RELEASE
data  kubernetes  logs      R       sbin
[root@master spark-2.4.5]# pwd
/usr/local/soft/spark-2.4.5
[root@master spark-2.4.5]# ./sbin/stop-all.sh
node1: stopping org.apache.spark.deploy.worker.Worker
node2: stopping org.apache.spark.deploy.worker.Worker
stopping org.apache.spark.deploy.master.Master
[root@master spark-2.4.5]# jps
57236 Jps
[root@master soft]# cd spark-2.4.5/
[root@master spark-2.4.5]# ls
bin   examples    LICENSE   NOTICE  README.md  yarn
conf  jars        licenses  python  RELEASE
data  kubernetes  logs      R       sbin
[root@master spark-2.4.5]# cd conf/
[root@master conf]# ls
docker.properties.template   slaves.template
fairscheduler.xml.template   spark-defaults.conf.template
log4j.properties.template    spark-env.sh
metrics.properties.template  spark-env.sh.template
slaves
[root@master conf]# pwd
/usr/local/soft/spark-2.4.5/conf
[root@master conf]# vim spark-env.sh
export HADOOP_CONF_DIR=/usr/local/soft/hadoop-2.7.6/etc/hadoop
[root@master conf]# cd /usr/local/soft/hadoop-2.7.6/etc/hadoop/
[root@master hadoop]# ls
capacity-scheduler.xml      kms-env.sh
configuration.xsl           kms-log4j.properties
container-executor.cfg      kms-site.xml
core-site.xml               log4j.properties
core-site.xml.0             mapred-env.cmd
hadoop-env.cmd              mapred-env.sh
hadoop-env.sh               mapred-queues.xml.template
hadoop-env.sh.0             mapred-site.xml
hadoop-metrics2.properties  mapred-site.xml.template
hadoop-metrics.properties   slaves
hadoop-policy.xml           slaves.0
hdfs-site.xml               ssl-client.xml.example
hdfs-site.xml.0             ssl-server.xml.example
httpfs-env.sh               yarn-env.cmd
httpfs-log4j.properties     yarn-env.sh
httpfs-signature.secret     yarn-site.xml
httpfs-site.xml             yarn-site.xml.0
kms-acls.xml
[root@master hadoop]# pwd
/usr/local/soft/hadoop-2.7.6/etc/hadoop
[root@master hadoop]# stop-yarn.sh
stopping yarn daemons
no resourcemanager to stop
node2: no nodemanager to stop
node1: no nodemanager to stop
no proxyserver to stop
[root@master hadoop]# vim yarn-site.xml

<property>
        <name>yarn.nodemanager.pmem-check-enabled</name>
        <value>false</value>
    </property>

    <property>
        <name>yarn.nodemanager.vmem-check-enabled</name>
        <value>false</value>
    </property>

[root@master hadoop]# scp -r yarn-site.xml node1:`pwd`
yarn-site.xml                 100% 1381   113.7KB/s   00:00    
[root@master hadoop]# scp -r yarn-site.xml node2:`pwd`
yarn-site.xml                 100% 1381   402.0KB/s   00:00   
[root@master hadoop]# start-yarn.sh
starting yarn daemons
starting resourcemanager, logging to /usr/local/soft/hadoop-2.7.6/logs/yarn-root-resourcemanager-master.out
node1: starting nodemanager, logging to /usr/local/soft/hadoop-2.7.6/logs/yarn-root-nodemanager-node1.out
node2: starting nodemanager, logging to /usr/local/soft/hadoop-2.7.6/logs/yarn-root-nodemanager-node2.out
[root@master hadoop]# jps
58825 Jps
58554 ResourceManager
[root@master hadoop]# start-dfs.sh
Starting namenodes on [master]
master: starting namenode, logging to /usr/local/soft/hadoop-2.7.6/logs/hadoop-root-namenode-master.out
node1: starting datanode, logging to /usr/local/soft/hadoop-2.7.6/logs/hadoop-root-datanode-node1.out
node2: starting datanode, logging to /usr/local/soft/hadoop-2.7.6/logs/hadoop-root-datanode-node2.out
Starting secondary namenodes [0.0.0.0]
0.0.0.0: starting secondarynamenode, logging to /usr/local/soft/hadoop-2.7.6/logs/hadoop-root-secondarynamenode-master.out
[root@master hadoop]# jps
59287 Jps
58953 NameNode
58554 ResourceManager
59163 SecondaryNameNode
[root@master hadoop]# cd /usr/local/soft/spark-2.4.5/examples/jars
[root@master jars]# ls
scopt_2.11-3.7.0.jar  spark-examples_2.11-2.4.5.jar
[root@master jars]# spark-submit --class org.apache.spark.examples.SparkPi --master yarn --deploy-mode client --executor-memory 512M --num-executors 2 spark-examples_2.11-2.4.5.jar 100
[root@master jars]# spark-submit --class org.apache.spark.examples.SparkPi --master yarn --deploy-mode cluster --executor-memory 512m --num-executors 2 --executor-cores 1 spark-examples_2.11-2.4.5.jar 100
[root@master jars]# yarn logs -applicationId application_1652359302466_0002

LogType:stdout
Log Upload Time:星期四 五月 12 20:49:01 +0800 2022
LogLength:0
Log Contents:
End of LogType:stdout

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值