<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch-spark-20_2.11</artifactId>
<version>6.7.2</version>
</dependency>
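The artifact id encodes both the Spark line (20 for Spark 2.x) and the Scala version (2.11), so it has to match the build. For a project built with sbt instead of Maven, a roughly equivalent declaration (assuming the same 6.7.2 release) would be:

// sbt equivalent of the Maven dependency above (assumed same version)
libraryDependencies += "org.elasticsearch" % "elasticsearch-spark-20_2.11" % "6.7.2"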
scala> val df = spark.read.format("org.elasticsearch.spark.sql")
         .options(Map(
           "es.nodes" -> "192.168.61.240",
           "es.port" -> "8200"))
         .load("ent_label")
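Once loaded, the DataFrame can be inspected with the usual Spark calls; a minimal sanity check, assuming the ent_label index is reachable on the cluster configured above:

scala> df.printSchema()
scala> df.show(5, false)   // first 5 documents, without truncating long fields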
scala> import org.elasticsearch.spark.sql._
scala> val options = Map("es.nodes" -> "192.168.61.240", "es.port" -> "8200")
scala> val df = spark.esDF("ent_label/_doc", "?q=isList:Y", options)
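The same filtered read can also be expressed through the generic reader by passing the query via the es.query option; a sketch using the index and query from the esDF call above (filteredDf is just an illustrative name):

scala> val filteredDf = spark.read.format("org.elasticsearch.spark.sql")
         .options(options + ("es.query" -> "?q=isList:Y"))
         .load("ent_label/_doc")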
scala> val df = Seq((1, "d"), (2, "c")).toDF("id", "a")
scala> df.write.format("org.elasticsearch.spark.sql")
         .options(Map(
           "es.nodes" -> "192.168.61.240",
           "es.port" -> "8200",
           "es.mapping.id" -> "id",
           "es.index.auto.create" -> "true"))
         .mode("append")
         .save("my_label/_doc")
scala> df.write.format("org.elasticsearch.spark.sql")
         .options(Map(
           "es.nodes" -> "192.168.61.240",
           "es.port" -> "8200",
           "es.mapping.id" -> "id",
           "es.index.auto.create" -> "true",
           "es.write.operation" -> "upsert"))
         .mode("append")
         .save("my_label/_doc")
scala> import org.elasticsearch.spark.sql._
scala> df.saveToEs("my_label/_doc",
         Map("es.nodes" -> "192.168.61.240",
             "es.port" -> "8200",
             "es.mapping.id" -> "id",
             "es.index.auto.create" -> "true"))
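The connection settings do not have to be repeated on every call: elasticsearch-hadoop can also pick up es.* properties from the Spark configuration. A sketch of a standalone job that sets them once at session creation (the application name is illustrative; the address and port are the ones used above):

// Minimal sketch: supply es.* settings through the SparkConf so that
// saveToEs only needs the target resource.
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark.sql._

val session = SparkSession.builder()
  .appName("es-write-example")
  .config("es.nodes", "192.168.61.240")
  .config("es.port", "8200")
  .getOrCreate()

import session.implicits._
val data = Seq((1, "d"), (2, "c")).toDF("id", "a")
data.saveToEs("my_label/_doc")   // nodes/port come from the Spark config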
package com.kuaidi100.bigdata.context
import java.util.Properties
object ElasticsearchUtils {

  // Load the Elasticsearch configuration file
  val PROP: Properties = PropertiesUtil.getProperties("elasticsearch.properties")

  // Teacher Xie's cluster
  val ES_PERSONAL_BUSINESS_NODES = PROP.getProperty("es.nodes")
  val ES_PERSONAL_BUSINESS_PORT = PROP.getProperty("es.port")

  // Test environment for the personal business
  val ES_TEST_NODES = PROP.getProperty("es.test.nodes")
  val ES_TEST_PORT = PROP.getProperty("es.test.port")

  // ES cluster for the international business
  val ES_AGOGOPOST_PROD_NODES = PROP.getProperty("es.agogopost.prod.nodes")
  val ES_AGOGOPOST_PROD_PORT = PROP.getProperty("es.agogopost.prod.port")
}
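A hypothetical consumer of this helper could build its option map from the test-environment constants; the index name ent_label and the SparkSession value spark below are assumptions, not part of the original utility:

// Illustrative usage of ElasticsearchUtils from a Spark job
val testOptions = Map(
  "es.nodes" -> ElasticsearchUtils.ES_TEST_NODES,
  "es.port"  -> ElasticsearchUtils.ES_TEST_PORT)

val testDf = spark.read.format("org.elasticsearch.spark.sql")
  .options(testOptions)
  .load("ent_label")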