Elasticsearch基于TransportClient的java调用

之前使用rest方式调用,不仅在大数据量导入的情况下会有数据丢失的情况,而且编写非常麻烦,就拿mapping举例,全是字符串拼接,一个斜杠少写了就over了,代码看上去很乱.

1.创建索引

/**
 * Creates the "product" index (settings/mappings are added separately).
 *
 * Fix: the original created "products", but every other snippet in this file
 * (mapping, aliases, exists-check, delete) targets "product" — the trailing
 * "s" was a typo that left createMapping() pointing at a missing index.
 */
public void createIndex() {
    EsUtils.transportClient().admin().indices().create(new CreateIndexRequest("product")).actionGet();
}

2.创建mapping结构

/**
 * Puts the "product_info" type mapping onto the "product" index.
 * The index itself must already exist (see createIndex()).
 */
@Test
public void createMapping() {
    PutMappingRequest request = Requests.putMappingRequest("product")
            .type("product_info")
            .source(getMapping());
    EsUtils.transportClient().admin().indices().putMapping(request).actionGet();
}
/**
 * Builds the mapping for the "product_info" type: identifier-like fields are
 * indexed as non-analyzed "keyword", free text ("content") as analyzed
 * "text", and flag fields as "boolean".
 *
 * Fix: the original repeated the startObject/field/endObject triple eleven
 * times and, on IOException, swallowed the error and returned null (causing
 * an NPE later at the call site). The repetition is folded into a helper and
 * the failure now fails loudly.
 *
 * @return the mapping as an XContentBuilder, ready for a PutMappingRequest
 * @throws IllegalStateException if building the in-memory JSON fails
 */
public static XContentBuilder getMapping() {
    // Field names grouped by ES type; order matches the original mapping.
    final String[] keywordFields = {
            "original_img", "title", "model", "title_en", "barcode", "number", "thumb"
    };
    final String[] booleanFields = {"is_open", "status", "is_del"};
    try {
        XContentBuilder mapping = jsonBuilder()
                .startObject()
                .startObject("properties");
        for (String name : keywordFields) {
            addTypedField(mapping, name, "keyword");
        }
        addTypedField(mapping, "content", "text");
        for (String name : booleanFields) {
            addTypedField(mapping, name, "boolean");
        }
        // Close "properties" and the root object; the endObject() calls must
        // balance the startObject() calls above.
        return mapping.endObject().endObject();
    } catch (IOException e) {
        throw new IllegalStateException("failed to build product_info mapping", e);
    }
}

/** Appends one property of the form {"name": {"type": type}} to the mapping. */
private static void addTypedField(XContentBuilder mapping, String name, String type)
        throws IOException {
    mapping.startObject(name).field("type", type).endObject();
}

**一定要注意最后的 endObject() 的个数，必须与前面的 startObject() 一一配对**

3.添加和替换别名

/**
 * Adds the alias "my_index" to the "product" index so callers can query
 * through the alias instead of the concrete index name.
 */
public void createAlias() {
    EsUtils.transportClient()
            .admin()
            .indices()
            .prepareAliases()
            .addAlias("product", "my_index")
            .get(); // get() is shorthand for execute().actionGet()
}
/**
 * Atomically repoints the alias "my_index" from the old index
 * ("my_index_v1") to the new one ("product"). Both actions go out in a
 * single aliases request, so readers never see a moment without the alias.
 */
@Test
public void replaceAlias() {
    EsUtils.transportClient()
            .admin()
            .indices()
            .prepareAliases()
            .removeAlias("my_index_v1", "my_index")
            .addAlias("product", "my_index")
            .get(); // get() is shorthand for execute().actionGet()
}

**别名是很有用的：mapping 不能修改，只能删除重建，通过别名切换新旧索引就可以无缝衔接了**

4.删除

/**
 * Deletes the "product" index if it exists; a no-op otherwise.
 */
@Test
public void deleteIndex() {
    boolean exists = EsUtils.transportClient().admin().indices()
            .exists(new IndicesExistsRequest("product"))
            .actionGet()
            .isExists();
    if (exists) {
        EsUtils.transportClient().admin().indices()
                .delete(new DeleteIndexRequest("product"))
                .actionGet();
    }
}

/**
 * Attempts to delete a whole type under an index.
 *
 * NOTE(review): prepareDelete() builds a single-document delete and normally
 * requires an id; Elasticsearch also has no API for dropping an entire type,
 * so this call likely fails at runtime — verify, and consider a
 * delete-by-query (see deleteAll()) scoped to the type instead.
 */
@Test
public void deleteType() {
    EsUtils.transportClient().prepareDelete().setIndex("IndexName").setType("TypeName").execute().actionGet();
}

/**
 * Deletes a single document, addressed by index, type and document id.
 */
@Test
public void deleteDoc() {
    // prepareDelete(index, type, id) is equivalent to the setter chain
    // setIndex(...).setType(...).setId(...).
    EsUtils.transportClient().prepareDelete("IndexName", "TypeName", "id").get();
}

/**
 * Removes every document from the "product" index with a match-all
 * delete-by-query, leaving the index and its mapping in place.
 */
@Test
public void deleteAll() {
    DeleteByQueryAction.INSTANCE
            .newRequestBuilder(EsUtils.transportClient())
            .filter(QueryBuilders.matchAllQuery())
            .source("product")
            .execute()
            .actionGet(); // same as get()
}

5.查询

/**
 * Term-queries the "product" index (type "product_info") for documents whose
 * title is exactly "德国" and prints up to 60 hits as raw JSON source.
 */
@Test
public void select() {
    Client esClient = EsUtils.transportClient();

    // Exact-match condition on the title field, wrapped in a bool query.
    BoolQueryBuilder condition = QueryBuilders.boolQuery()
            .must(QueryBuilders.termQuery("title", "德国"));

    SearchResponse searchResponse = esClient.prepareSearch("product")
            .setTypes("product_info")
            .setQuery(condition)
            .setFrom(0)
            .setSize(60)
            .get(); // same as execute().actionGet()

    for (SearchHit hit : searchResponse.getHits()) {
        System.out.println(hit.getSourceAsString());
    }
    // NOTE(review): this closes the client returned by EsUtils — if that
    // client is a shared singleton, later calls will fail; confirm it is
    // created per call before relying on this.
    esClient.close();
}

**这个就比 rest 方式简洁太多了，基本语法就不再赘述**

6.BulkProcessor批次导入

import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.net.InetAddress;
import java.net.UnknownHostException;

@Configuration
public class EsBulkProcessor {

    public static final Logger logger = LoggerFactory.getLogger(EsBulkProcessor.class);

    /**
     * Builds a BulkProcessor bean backed by a TransportClient.
     *
     * A queued bulk is flushed when any threshold is reached: 1000 actions,
     * 5 MB of payload, or 5 seconds since the last flush.
     *
     * Fixes over the original: the listener now logs through the class's own
     * SLF4J logger instead of System.out, and the success callback checks
     * hasFailures() before reporting — the original unconditionally printed
     * buildFailureMessage() and claimed "成功" even for failed items.
     *
     * @return the configured BulkProcessor
     * @throws UnknownHostException if the Elasticsearch host cannot be resolved
     */
    @Bean
    public BulkProcessor bulkProcessor() throws UnknownHostException {
        // "docker-cluster" is the cluster.name value; it is visible as
        // cluster_name at http://localhost:9200/
        Settings settings = Settings.builder().put("cluster.name", "docker-cluster").build();

        Client client = new PreBuiltTransportClient(settings)
                .addTransportAddress(new TransportAddress(InetAddress.getByName("localhost"), 9300));

        return BulkProcessor.builder(client, new BulkProcessor.Listener() {
            @Override
            public void beforeBulk(long executionId, BulkRequest request) {
                logger.info("about to bulk {} actions: {}",
                        request.numberOfActions(), request.requests());
            }

            @Override
            public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
                // A response means the request went through, but individual
                // items may still have failed — check explicitly.
                if (response.hasFailures()) {
                    logger.error("bulk of {} actions had item failures: {}",
                            request.numberOfActions(), response.buildFailureMessage());
                } else {
                    logger.info("bulk of {} actions succeeded", request.numberOfActions());
                }
            }

            @Override
            public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
                // The whole bulk request failed (e.g. the node was unreachable).
                logger.error("bulk of {} actions failed", request.numberOfActions(), failure);
            }
        })
                // Flush after every 1000 queued requests...
                .setBulkActions(1000)
                // ...or once the queued payload reaches 5 MB...
                .setBulkSize(new ByteSizeValue(5, ByteSizeUnit.MB))
                // ...or every 5 seconds, whichever comes first.
                .setFlushInterval(TimeValue.timeValueSeconds(5))
                // 1 = one bulk may be in flight while new requests accumulate.
                .setConcurrentRequests(1)
                // On resource-exhaustion rejections, retry up to 3 times with
                // exponential backoff starting at 100 ms.
                .setBackoffPolicy(BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(100), 3))
                .build();
    }
}

调用

import com.fasterxml.jackson.databind.ObjectMapper;

@Autowired
private BulkProcessor bulkProcessor;
private ObjectMapper objectMapper = new ObjectMapper();
/**
 * Bulk-indexes product documents into product/product_info through the shared
 * BulkProcessor, using each document's "id" field as the ES document id.
 *
 * Fix: the original called bulkProcessor.close() at the end — but the
 * processor is an @Autowired singleton, so closing it here broke every
 * subsequent import. flush() pushes the queued requests without shutting the
 * processor down.
 *
 * @param proDatas the product documents to index
 */
public void esTransportBatchPush(List<JSONObject> proDatas) {
    for (JSONObject proData : proDatas) {
        try {
            byte[] json = objectMapper.writeValueAsBytes(proData);
            // The newer API requires an explicit XContentType with byte[] sources.
            bulkProcessor.add(new IndexRequest("product", "product_info",
                    proData.get("id").toString()).source(json, XContentType.JSON));
        } catch (JsonProcessingException e) {
            // Skip the unserializable document but keep importing the rest.
            e.printStackTrace();
        }
    }
    // Push anything still queued; do NOT close the shared processor here.
    bulkProcessor.flush();
}
  • 1
    点赞
  • 3
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值