Bulk writing from Spark to Elasticsearch: JavaEsSparkSQL.saveToEs

This article collects typical usage examples of the Java method org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL.saveToEs. If you are wondering exactly what JavaEsSparkSQL.saveToEs does, how to call it, or what it looks like in real code, the examples selected below should help. For broader context, you can also look at other usage examples of the enclosing class, org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL.
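
Before the individual examples, here is a minimal, self-contained sketch of the basic call against the Spark 1.x DataFrame API used in most of the snippets below. The host, port, index name, and sample record are placeholder assumptions for illustration only.

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;
import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL;

public class SaveToEsSketch {
    public static void main(String[] args) {
        // Point the connector at the cluster (host and port are assumptions).
        SparkConf conf = new SparkConf()
                .setAppName("saveToEs-sketch")
                .setMaster("local[*]")
                .set("es.nodes", "localhost")
                .set("es.port", "9200");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        SQLContext sqc = new SQLContext(jsc);

        // Build a tiny DataFrame from an in-memory JSON record.
        DataFrame df = sqc.read().json(
                jsc.parallelize(Arrays.asList("{\"id\":1,\"name\":\"MALICE MIZER\"}")));

        // Write it to the "index/type" resource; each row becomes one document.
        JavaEsSparkSQL.saveToEs(df, "sparksql-test/sketch");

        jsc.stop();
    }
}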

1.esDataFrameReadMetadata

import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; 

public void esDataFrameReadMetadata() throws Exception {
    DataFrame artists = artistsAsDataFrame();
    String target = "sparksql-test/scala-dataframe-read-metadata";
    JavaEsSparkSQL.saveToEs(artists, target);

    DataFrame dataframe = sqc.read().format("es").option("es.read.metadata", "true").load(target).where("id = 1");

    // Since _metadata field isn't a part of _source,
    // we want to check that it could be fetched in any position.
    assertEquals("sparksql-test", dataframe.selectExpr("_metadata['_index']").takeAsList(1).get(0).get(0));
    assertEquals("sparksql-test", dataframe.selectExpr("_metadata['_index']", "name").takeAsList(1).get(0).get(0));
    assertEquals("MALICE MIZER", dataframe.selectExpr("_metadata['_index']", "name").takeAsList(1).get(0).get(1));
    assertEquals("MALICE MIZER", dataframe.selectExpr("name", "_metadata['_index']").takeAsList(1).get(0).get(0));
    assertEquals("sparksql-test", dataframe.selectExpr("name", "_metadata['_index']").takeAsList(1).get(0).get(1));
}

2.esWriteDataFrame

import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; 
/**
 * Write a {@link DataFrame} to the specified output
 *
 * @param name name of output table
 * @param df   dataframe containing the data
 */
public void esWriteDataFrame(String name, DataFrame df) {
    Map<String, String> props = config.getProperties(name);
    log.info("Writing to ElasticSearch: {}", props);
    JavaEsSparkSQL.saveToEs(df, props);
}
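
Here the connector settings come from the application's own config object, so the snippet does not show them. As a rough sketch, the props map could look like the following; the keys are standard elasticsearch-hadoop settings, while the values (index name, host, column name, batch size) are placeholder assumptions.

import java.util.HashMap;
import java.util.Map;

// Placeholder values; the keys are standard elasticsearch-hadoop settings.
Map<String, String> props = new HashMap<>();
props.put("es.resource", "my-index/my-type");    // target index/type to write to
props.put("es.nodes", "localhost:9200");         // cluster entry point
props.put("es.mapping.id", "id");                // use the "id" column as the document _id
props.put("es.batch.size.entries", "1000");      // documents per bulk request

JavaEsSparkSQL.saveToEs(df, props);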

3.esSchemaRDD1Write

import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; 

public void esSchemaRDD1Write() throws Exception {
    JavaSchemaRDD schemaRDD = artistsAsSchemaRDD();

    String target = "sparksql-test/scala-basic-write";
    JavaEsSparkSQL.saveToEs(schemaRDD, target);
    assertTrue(RestUtils.exists(target));
    assertThat(RestUtils.get(target + "/_search?"), containsString("qqq"));
}

4.esSchemaRDD1WriteWithId

import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; // import the package/class the method depends on
@Test
public void esSchemaRDD1WriteWithId() throws Exception {
    JavaSchemaRDD schemaRDD = artistsAsSchemaRDD();

    String target = "sparksql-test/scala-basic-write-id-mapping";
    JavaEsSparkSQL.saveToEs(schemaRDD, target, ImmutableMap.of(ES_MAPPING_ID, "id"));
    assertTrue(RestUtils.exists(target));
    assertThat(RestUtils.get(target + "/_search?"), containsString("qqq"));
    assertThat(RestUtils.exists(target + "/1"), is(true));
}
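
ES_MAPPING_ID comes from the elasticsearch-hadoop ConfigurationOptions constants and resolves to the setting "es.mapping.id", and ImmutableMap is Guava. Outside the test suite the same write can be expressed with plain JDK collections and the literal key, as in this sketch (index name taken from the example above):

import java.util.Collections;

// "es.mapping.id" is the literal setting behind the ES_MAPPING_ID constant:
// the value of the "id" column becomes the Elasticsearch document _id.
JavaEsSparkSQL.saveToEs(schemaRDD, "sparksql-test/scala-basic-write-id-mapping",
        Collections.singletonMap("es.mapping.id", "id"));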

5.esSchemaRDD1WriteWithMappingExclude

import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; 
@Test
public void esSchemaRDD1WriteWithMappingExclude() throws Exception {
    JavaSchemaRDD schemaRDD = artistsAsSchemaRDD();

    String target = "sparksql-test/scala-basic-write-exclude-mapping";
    JavaEsSparkSQL.saveToEs(schemaRDD, target, ImmutableMap.of(ES_MAPPING_EXCLUDE, "url"));
    assertTrue(RestUtils.exists(target));
    assertThat(RestUtils.get(target + "/_search?"), not(containsString("url")));
}

6.esdataFrame1Write

import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; // import the package/class the method depends on
@Test
public void esdataFrame1Write() throws Exception {
    DataFrame dataFrame = artistsAsDataFrame();

    String target = "sparksql-test/scala-basic-write";
    JavaEsSparkSQL.saveToEs(dataFrame, target);
    assertTrue(RestUtils.exists(target));
    assertThat(RestUtils.get(target + "/_search?"), containsString("qqq"));
}

7.esDataFrame1WriteWithId

import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; // import the package/class the method depends on
@Test
public void esDataFrame1WriteWithId() throws Exception {
    DataFrame dataFrame = artistsAsDataFrame();

    String target = "sparksql-test/scala-basic-write-id-mapping";
    JavaEsSparkSQL.saveToEs(dataFrame, target,
            ImmutableMap.of(ES_MAPPING_ID, "id"));
    assertTrue(RestUtils.exists(target));
    assertThat(RestUtils.get(target + "/_search?"), containsString("qqq"));
    assertThat(RestUtils.exists(target + "/1"), is(true));
}

8.esSchemaRDD1WriteWithMappingExclude

import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; // import the package/class the method depends on
@Test
public void esSchemaRDD1WriteWithMappingExclude() throws Exception {
    DataFrame dataFrame = artistsAsDataFrame();

    String target = "sparksql-test/scala-basic-write-exclude-mapping";
    JavaEsSparkSQL.saveToEs(dataFrame, target, ImmutableMap.of(ES_MAPPING_EXCLUDE, "url"));
    assertTrue(RestUtils.exists(target));
    assertThat(RestUtils.get(target + "/_search?"), not(containsString("url")));
}

9.esDatasetWrite

import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; 
@Test
public void esDatasetWrite() throws Exception {
    Dataset<Row> dataset = artistsAsDataset();

    String target = "sparksql-test-scala-basic-write/data";
    JavaEsSparkSQL.saveToEs(dataset, target);
    assertTrue(RestUtils.exists(target));
    assertThat(RestUtils.get(target + "/_search?"), containsString("qqq"));
}
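
For Dataset/DataFrame code, the static helper is not the only route: the connector also registers itself as a Spark data source (already used for reading in example 1 via format("es")), so the same write can go through the DataFrameWriter. A sketch, reusing the target from the example above:

// Equivalent write through the Spark data source API; "es" is the connector's
// short name for org.elasticsearch.spark.sql.
dataset.write()
        .format("es")
        .mode("append")
        .save("sparksql-test-scala-basic-write/data");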

10.esDataset1WriteWithId

import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; 
@Test
public void esDataset1WriteWithId() throws Exception {
    Dataset<Row> dataset = artistsAsDataset();

    String target = "sparksql-test-scala-basic-write-id-mapping/data";
    JavaEsSparkSQL.saveToEs(dataset, target,
            ImmutableMap.of(ES_MAPPING_ID, "id"));
    assertTrue(RestUtils.exists(target));
    assertThat(RestUtils.get(target + "/_search?"), containsString("qqq"));
    assertThat(RestUtils.exists(target + "/1"), is(true));
}

11.esSchemaRDD1WriteWithMappingExclude

import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; 
@Test
public void esSchemaRDD1WriteWithMappingExclude() throws Exception {
    Dataset<Row> dataset = artistsAsDataset();

    String target = "sparksql-test-scala-basic-write-exclude-mapping/data";
    JavaEsSparkSQL.saveToEs(dataset, target,
            ImmutableMap.of(ES_MAPPING_EXCLUDE, "url"));
    assertTrue(RestUtils.exists(target));
    assertThat(RestUtils.get(target + "/_search?"), not(containsString("url")));
}

12.esdataFrame1Write

import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; 
@Test
public void esdataFrame1Write() throws Exception {
    DataFrame dataFrame = artistsAsDataFrame();

    String target = "sparksql-test-scala-basic-write/data";
    JavaEsSparkSQL.saveToEs(dataFrame, target);
    assertTrue(RestUtils.exists(target));
    assertThat(RestUtils.get(target + "/_search?"), containsString("qqq"));
}

13.esdataFrame1WriteWithId

import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; 
@Test
public void esdataFrame1WriteWithId() throws Exception {
    DataFrame dataFrame = artistsAsDataFrame();

    String target = "sparksql-test-scala-basic-write-id-mapping/data";
    JavaEsSparkSQL.saveToEs(dataFrame, target,
            ImmutableMap.of(ES_MAPPING_ID, "id"));
    assertTrue(RestUtils.exists(target));
    assertThat(RestUtils.get(target + "/_search?"), containsString("qqq"));
    assertThat(RestUtils.exists(target + "/1"), is(true));
}

14.esSchemaRDD1WriteWithMappingExclude

import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL; 
@Test
public void esSchemaRDD1WriteWithMappingExclude() throws Exception {
    DataFrame dataFrame = artistsAsDataFrame();

    String target = "sparksql-test-scala-basic-write-exclude-mapping/data";
    JavaEsSparkSQL.saveToEs(dataFrame, target, ImmutableMap.of(ES_MAPPING_EXCLUDE, "url"));
    assertTrue(RestUtils.exists(target));
    assertThat(RestUtils.get(target + "/_search?"), not(containsString("url")));
}