package com.bjsxt;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.hive.HiveContext;
/**
 * Demo: runs Hive SQL from Spark using the legacy (pre-Spark-2.x) HiveContext API
 * against a local master. Switches to the {@code spark} database and drops the
 * {@code student_infos} table if it exists.
 *
 * NOTE(review): presumably the rest of main() recreates and populates the table —
 * the remainder of this method is outside the visible portion of the file.
 */
public class CreateDFFromHiveLocalTest {
// Entry point: configure Spark, obtain a HiveContext, issue Hive DDL.
public static void main(String[] args) {
// Local single-JVM master; application name "hive".
SparkConf conf =new SparkConf().setAppName("hive").setMaster("local");
JavaSparkContext sc=new JavaSparkContext(conf);
//SQLContext sqlContext=new SQLContext(sc);
// HiveContext is a subclass of SQLContext (adds Hive metastore / HiveQL support).
HiveContext hiveContext=new HiveContext(sc);
// Switch current database to "spark" — assumes it already exists in the Hive metastore; TODO confirm.
hiveContext.sql("USE spark");
// Drop any stale copy of the table before it is (presumably) recreated later in this method.
hiveContext.sql("DROP TABLE IF EXISTS student_infos");