1. 运行 Spark 自带例子时遇到的几个问题
# Make the Hadoop/YARN client configuration visible to the Spark YARN client.
export YARN_CONF_DIR=$HADOOP_HOME/etc/hadoop/
# Submit Spark's ApplicationMaster to YARN's ResourceManager, and instruct Spark to run the SparkPi example
# SPARK_JAR names the Spark assembly jar shipped to YARN for this submission.
# The remaining flags give the examples jar, the driver class, its argument
# ("yarn-standalone" — presumably runs the driver inside the ApplicationMaster;
# TODO confirm against the Spark 0.8.1 YARN docs), and the worker count,
# master/worker memory, and cores per worker.
# NOTE: the continuation backslashes make lines 4-12 one single command.
SPARK_JAR=./assembly/target/scala-2.9.3/spark-assembly-0.8.1-incubating-hadoop2.2.0.jar \
./spark-class org.apache.spark.deploy.yarn.Client \
--jar ./assembly/target/scala-2.9.3/spark-examples-assembly-0.8.1-incubating.jar \
--class org.apache.spark.examples.SparkPi \
--args yarn-standalone \
--num-workers 3 \
--master-memory 2g \
--worker-memory 2g \
--worker-cores 1
直接运行时总是报错,应该先执行 start-master.sh 启动 master,再提交任务。
2. 运行 WordCount 例子
# Same as above: point the Spark YARN client at the Hadoop configuration directory.
export YARN_CONF_DIR=$HADOOP_HOME/etc/hadoop/
# Submit Spark's ApplicationMaster to YARN's Re