1.拷贝hive-site.xml到spark/conf下,拷贝mysql-connector-java-xxx-bin.jar到hive/lib下
2.开启hive元数据服务:hive --service metastore
3.开启hadoop服务:sh $HADOOP_HOME/sbin/start-all.sh
4.开启spark服务:sh $SPARK_HOME/sbin/start-all.sh
5.进入spark-shell:spark-shell
6.scala操作hive(spark-sql)
scala>val conf=new SparkConf().setAppName("SparkHive").setMaster("local") //可忽略,已经自动创建了
scala>val sc=new SparkContext(conf) //可忽略,已经自动创建了
scala>val sqlContext = new org.apache.spark.sql.hive.HiveContext(sc)
scala>sqlContext.sql("CREATE TABLE IF NOT EXISTS src (key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' ")//这里需要注意数据的间隔符
scala>sqlContext.sql("LOAD DATA INPATH '/user/spark/src.txt' INTO TABLE src ");
scala>sqlContext.sql(" SELECT * FROM src").collect().foreach(println)
scala>sc.stop()
SQL context available as sqlContext.
scala> val sqlContext = new org.apache.spark.sql.hive.HiveContext(sc)
17/12/05 10:38:51 INFO HiveContext: Initializing execution hive, version 1.2.1
17/12/05 10:38:51 INFO ClientWrapper: Inspected Hadoop version: 2.4.0
17/12/05 10:38:51 INFO ClientWrapper: Loaded org.apache.hadoop.hive.shims.Hadoop23Shims for Hadoop version 2.4.0
17/12/05 10:38:51 WARN HiveConf: HiveConf of name hive.metastore.local does not exist
17/12/05 10:38:51 WARN HiveConf: HiveConf of name hive.server2.webui.port does not exist
17/12/05 10:38:51 WARN HiveConf: HiveConf of name hive.server2.webui.host does not exist
17/12/05 10:38:51 WARN HiveConf: HiveConf of name hive.enable.spark.execution.engine does not exist
17/12/05 10:38:51 INFO metastore: Mestastore configuration hive.metastore.warehouse.dir changed from file:/tmp/spark-ecfcdcc1-2bb0-4efc-aa00-96ad1dd47840/metastore to file:/tmp/spark-ea48b58b-ef90-43c0-8d5e-f54a4b4cadde/metastore
17/12/05 10:38:51 INFO metastore: Mestastore configuration javax.jdo.option.ConnectionURL changed from jdbc:derby:;databaseName=/tmp/spark-ecfcdcc1-2bb0-4efc-aa00-96ad1dd47840/metastore;create=true to jdbc:derby:;databaseName=/tmp/spark-ea48b58b-ef90-43c0-8d5e-f54a4b4cadde/metastore;create=true
17/12/05 10:38:51 INFO HiveMetaStore: 0: Shutting down the object store...
17/12/05 10:38:51 INFO audit: ugi=root	ip=unknown-ip-addr	cmd=Shutting down the object store...
17/12/05 10:38:51 INFO HiveMetaStore: 0: Metastore shutdown complete.
17/12/05 10:38:51 INFO audit: ugi=root	ip=unknown-ip-addr	cmd=Metastore shutdown complete.
17/12/05 10:38:51 INFO HiveMetaStore: 0: Opening raw store with implemenation class:org.apache.hadoop.hive.metastore.ObjectStore
17/12/05 10:38:51 INFO ObjectStore: ObjectStore, initialize called
17/12/05 10:38:51 INFO Persistence: Property hive.metastore.integral.jdo.pushdown unknown - will be ignored
17/12/05 10:38:51 INFO Persistence: Property datanucleus.cache.level2 unknown - will be ignored
17/12/05 10:38:56 WARN HiveConf: HiveConf of name hive.metastore.local does not exist
17/12/05 10:38:56 WARN HiveConf: HiveConf of name hive.server2.webui.port does not exist
17/12/05 10:38:56 WARN HiveConf: HiveConf of name hive.server2.webui.host does not exist
17/12/05 10:38:56 WARN HiveConf: HiveConf of name hive.enable.spark.execution.engine does not exist
17/12/05 10:38:56 INFO ObjectStore: Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes="Table,StorageDescriptor,SerDeInfo,Partition,Database,Type,FieldSchema,Order"
17/12/05 10:38:57 INFO Datastore: The class "org.apache.hadoop.hive.metastore.model.MFieldSchema" is tagged as "embedded-only" so does not have its own datastore table.
17/12/05 10:38:57 INFO Datastore: The class "org.apache.hadoop.hive.metastore.model.MOrder" is tagged as "embedded-only" so does not have its own datastore table.
17/12/05 10:39:01 INFO Datastore: The class "org.apache.hadoop.hive.metastore.model.MFieldSchema" is tagged as "embedded-only" so does not have its own datastore table.
17/12/05 10:39:01 INFO Datastore: The class "org.apache.hadoop.hive.metastore.model.MOrder" is tagged as "embedded-only" so does not have its own datastore table.
17/12/05 10:39:01 INFO MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
17/12/05 10:39:01 INFO ObjectStore: In