1. Spark写入MongoDB的实例代码
如下代码实现将RDD写入到MongoDB的spark数据库的ic集合(Collection)中
package spark.examples.db
import org.apache.hadoop.conf.Configuration
import org.apache.spark.{SparkContext, SparkConf}
import com.mongodb.hadoop.MongoOutputFormat
import org.apache.spark.SparkContext._
import org.bson.BasicBSONObject
object SparkMongoDBIntegration {
def main(args: Array[String]) {
val conf = new SparkConf().setAppName("SparkRDDCount").setMaster("local")
val sc = new SparkContext(conf);
val data = sc.parallelize(List(("Tom", 31), ("Jack", 22), ("Mary", 25)))
val config = new Configuration()
//spark.ic指的是:spark是MongoDB的数据库的名字,而ic表示数据库中的一个Collection
//NOTE(review): 这里设置的是 mongo.input.uri(读取端);若要写入MongoDB,通常应设置 mongo.output.uri — 待确认
config.set("mongo.input.uri", "mongodb://192.168.26.137:27017/spark.ic")
co