import glob
import os
import sys

# Root of the local Spark installation; pyspark is imported from this
# bundled copy rather than from a pip-installed package.
SPARK_HOME = "/opt/spark-2.0.1-bin-hadoop2.7"
os.environ['SPARK_HOME'] = SPARK_HOME

# Make the bundled pyspark importable.
sys.path.append(os.path.join(SPARK_HOME, "python"))
sys.path.append(os.path.join(SPARK_HOME, "python", "lib"))
# py4j ships as zip archives under python/lib/; each zip must be on
# sys.path itself — appending the lib/ directory alone is not enough.
for _zip in glob.glob(os.path.join(SPARK_HOME, "python", "lib", "*.zip")):
    sys.path.append(_zip)

try:
    # All Spark imports are guarded together so a missing/broken install
    # produces one clear message instead of a raw traceback.
    from pyspark import SparkContext
    from pyspark import SparkConf
    from pyspark.sql import SparkSession
    from pyspark.sql import SQLContext
    from pyspark.sql import Row
    print("Successfully imported Spark Modules")
except ImportError as e:
    print("Can not import Spark Modules", e)
    sys.exit(1)
# spark_home configuration
# (removed stray blog-page metadata: "latest recommended article published 2023-07-05 15:32:58")