Connecting Spark to MongoDB with Scala

MongoDB Connector for Spark

Spark Connector Scala Guide

Launch spark-shell with the connector and its dependencies on the classpath:

spark-shell --jars "mongo-spark-connector_2.11-2.0.0.jar,mongo-hadoop-core-2.0.2.jar,mongo-java-driver-3.4.2.jar"
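
Alternatively, spark-shell can resolve the connector and its transitive dependencies from Maven Central with --packages instead of hand-listing jars (a sketch, assuming the same connector version as above):

spark-shell --packages org.mongodb.spark:mongo-spark-connector_2.11:2.0.0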

import org.apache.spark.sql.SparkSession
import com.mongodb.spark._
import com.mongodb.spark.config._
import org.bson.Document

val spark = SparkSession.builder()
  .master("local")
  .appName("MongoSparkConnector")
  .config("spark.some.config.option", "some-value")
  .getOrCreate()

val uri = "mongodb://172.1.1.1:27017"
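
As the official guide does, the MongoDB URIs can instead be set once on the SparkSession builder; with spark.mongodb.input.uri and spark.mongodb.output.uri configured there, the per-call option maps below become optional. A sketch, assuming the same host and the test.test_table namespace used later:

val spark = SparkSession.builder()
  .master("local")
  .appName("MongoSparkConnector")
  .config("spark.mongodb.input.uri", "mongodb://172.1.1.1:27017/test.test_table")
  .config("spark.mongodb.output.uri", "mongodb://172.1.1.1:27017/test.test_table")
  .getOrCreate()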

val userDF = spark.sql("""
  select
    uid,
    name,
    current_date() version
  from test_table
  limit 100
""").repartition(8)

// Write to MongoDB

userDF.write.mode("overwrite").format("com.mongodb.spark.sql").options(
  Map(
    "uri" -> uri,
    "database" -> "test",
    "collection" -> "test_table")).save()

// Read from MongoDB

val df = spark.read.format("com.mongodb.spark.sql").options(
  Map(
    "uri" -> uri,
    "database" -> "test",
    "collection" -> "test_table")).load()

// Other approaches

userDF.write.mode("overwrite").format("com.mongodb.spark.sql").options(
  Map(
    "spark.mongodb.input.uri" -> uri,
    "spark.mongodb.output.uri" -> uri,
    "spark.mongodb.output.database" -> "test",
    "spark.mongodb.output.collection" -> "test_table")).save()

MongoSpark.save(
  userDF.write.mode("overwrite").options(
    Map(
      "spark.mongodb.input.uri" -> uri,
      "spark.mongodb.output.uri" -> uri,
      "spark.mongodb.output.database" -> "test",
      "spark.mongodb.output.collection" -> "test_table")))

MongoSpark.save(
  userDF.write.mode("overwrite").options(
    Map(
      "uri" -> uri,
      "database" -> "test",
      "collection" -> "test_table")))

spark.stop()
