Permission denied: user=administrator, access=WRITE, inode="/":root:supergroup:drwxr-xr-x
收藏中有网上的解决方法（更改系统环境变量），但对我没有生效。我的解决方法如下：
object HiveSupport {
  /**
   * Demonstrates connecting Spark SQL to a remote Hive metastore from a
   * local-mode SparkSession.
   *
   * Fix for `Permission denied: user=administrator, access=WRITE, inode="/"`:
   * HDFS resolves the acting user from the `HADOOP_USER_NAME` system
   * property, so it must be set to a user with write access BEFORE the
   * SparkSession is created. Setting it as an OS environment variable did
   * not take effect here; setting the JVM system property does.
   */
  def main(args: Array[String]): Unit = {
    // Must run before SparkSession.builder().getOrCreate() first touches
    // HDFS, otherwise the local OS user ("administrator") is used and
    // WRITE access to "/" is denied.
    System.setProperty("HADOOP_USER_NAME", "hadoop")

    // NOTE(review): "Ip" / "IP" look like placeholders — replace with the
    // actual NameNode / metastore host before running.
    val spark: SparkSession = SparkSession.builder()
      .appName("HiveSupport")
      .master("local[*]")
      .config("spark.sql.warehouse.dir", "hdfs://Ip:8020/user/hive/warehouse")
      .config("hive.metastore.uris", "thrift://IP:9083")
      .enableHiveSupport()
      .getOrCreate()

    spark.sparkContext.setLogLevel("WARN")

    try {
      // spark.sql("use testdb").show()
      // List the tables in the current database.
      spark.sql("show tables").show()
      // Create a table:
      // spark.sql("CREATE TABLE IF NOT EXISTS person (id int,name string,age int) row format delimited fields terminated by ' '")
      // Load data: person.txt in the SparkDemo project directory.
      // spark.sql("LOAD DATA LOCAL INPATH '/opt/person.txt' INTO TABLE PERSON")
      // spark.sql("show tables").show()
      spark.sql("select * from customers limit 5").show()
      // spark.sql("show create table person").show()
    } finally {
      // Fix: the original only called stop() on the success path, leaking the
      // local SparkContext (and its threads) if any query above threw.
      spark.stop()
    }
  }
}