spark-env.sh 环境配置（添加 Hive 支持）:
# JDK used by Spark daemons and executors.
export JAVA_HOME=/usr/lib/jdk1.8.0_171

# History-server JVM options: Kerberos disabled, so principal/keytab stay empty.
export SPARK_HISTORY_OPTS="-Dspark.history.kerberos.enabled=false \
-Dspark.history.kerberos.principal= \
-Dspark.history.kerberos.keytab="

# Hadoop / YARN locations (YARN shares the Hadoop config directory).
# The original file assigned HADOOP_CONF_DIR twice (once unexported); the
# redundant unexported assignment is removed here.
export HADOOP_HOME=/***/emr-hadoop-2.7.2
export HADOOP_CONF_DIR="${HADOOP_HOME}/etc/hadoop"
export YARN_CONF_DIR="${HADOOP_HOME}/etc/hadoop"

# Hive support: config dir plus Hive libs on the classpath.
# NOTE(review): a bare directory on CLASSPATH only exposes .class files, not
# jars — if the intent is to load the jars in lib/, this should be lib/* ;
# confirm before changing. ${VAR:-} keeps this safe under `set -u`.
export CLASSPATH="${CLASSPATH:-}:/***/emr-apache-hive-2.3.2-bin/lib"
export HIVE_CONF_DIR=/***/emr-apache-hive-2.3.2-bin/conf

# MySQL JDBC driver for the Hive metastore. SPARK_CLASSPATH is deprecated in
# Spark 1.0+ (prefer spark.driver/executor.extraClassPath), kept for
# compatibility with whatever launch scripts read it here.
export SPARK_CLASSPATH="${SPARK_CLASSPATH:-}:/***/emr-apache-hive-2.3.2-bin/lib/mysql-connector-java-5.1.38.jar"
scala代码&