1、首先添加 mongo-spark 依赖,官网地址:https://docs.mongodb.com/spark-connector/
<dependency>
    <groupId>org.mongodb.spark</groupId>
    <artifactId>mongo-spark-connector_2.10</artifactId>
    <version>1.1.0</version>
</dependency>
2、示例代码如下:
object Mongo2Hive {
def MongodbToHive(args: Array[String], sc: SparkContext): Unit = {
val hiveContext = new HiveContext(sc)
val Array(schemaFilePath, queryCondition): Array[String] = args
val schemaFile = sc.textFile(schemaFilePath).collect()
val Array(schemaStruct, requireFields, tempTableName, sql, mongodbConfig) = schemaFile
//MongoDB Config
val json = JSON.parseObject(mongodbConfig)
//mongodb host
val hostList