ElasticSearch -- CRUD with the Java API

  • Dependencies (the transport client version should match the Elasticsearch server version, here 5.2.2)

<dependencies>
    <dependency>
        <groupId>org.elasticsearch.client</groupId>
        <artifactId>transport</artifactId>
        <version>5.2.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.logging.log4j</groupId>
        <artifactId>log4j-api</artifactId>
        <version>2.7</version>
    </dependency>
    <dependency>
        <groupId>org.apache.logging.log4j</groupId>
        <artifactId>log4j-core</artifactId>
        <version>2.7</version>
    </dependency>
</dependencies>
  • log4j2.properties

appender.console.type=Console
appender.console.name=console
appender.console.layout.type=PatternLayout
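# no layout pattern is set, so PatternLayout falls back to its default conversion pattern (%m%n)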

rootLogger.level=info
rootLogger.appenderRef.console.ref=console
  • Simple CRUD:

Note:

The transport protocol used by the Java TransportClient listens on port 9300 by default.

The HTTP REST interface (and web management tools) listens on port 9200.
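
Before running the full example below, a quick way to confirm that the client can actually reach the node over the transport port is to print the connected-node list. This is a minimal sketch, assuming a local single-node cluster named "elasticsearch"; the class name ConnectivityCheck is just for illustration:

package com.zeng;

import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;

import java.net.InetAddress;

public class ConnectivityCheck {

    public static void main(String[] args) throws Exception {
        Settings settings = Settings.builder()
                .put("cluster.name", "elasticsearch") // must match the server's cluster.name
                .build();
        TransportClient client = new PreBuiltTransportClient(settings);
        client.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("localhost"), 9300));
        try {
            // an empty list means no node was reachable on the transport port
            System.out.println("connected nodes: " + client.connectedNodes());
        } finally {
            client.close();
        }
    }
}

The full CRUD example: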

package com.zeng;

import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;

import java.net.InetAddress;
import java.util.HashMap;
import java.util.Map;

public class EmployeeCRUDTest {

    public static void main(String[] args) {
        TransportClient client=null;
        try {
            // build the client
            Settings settings= Settings.builder()
                    .put("cluster.name","elasticsearch")
                    .build();

            client=new PreBuiltTransportClient(settings);
            client.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("localhost"),9300));

            createEmployee(client);
//            getEmployee(client);
//            updateEmployee(client);
//            deleteEmployee(client);

        } catch (Exception e) {
            e.printStackTrace();
        }finally {
            if(client!=null){
                client.close();
            }
        }
    }
    // partially update a document
    private static void updateEmployee(TransportClient client) {
        Map<String, Object> source=new HashMap<String, Object>();
        source.put("name","jack");
        source.put("age",21);
        source.put("position","technique");
        source.put("country","china");
        source.put("join_date","2017-01-01");
        source.put("salary",17000);

        UpdateResponse response = client.prepareUpdate("company", "employee", "1").setDoc(source).get();
        System.out.println(response.getResult());
    }
    // delete a document
    private static void deleteEmployee(TransportClient client) {
        DeleteResponse response = client.prepareDelete("company", "employee", "1").get();
        System.out.println(response.getResult());
    }

    // get a document
    private static void getEmployee(TransportClient client) {
        GetResponse response = client.prepareGet("company", "employee", "1").get();
        System.out.println(response.getSourceAsString());
    }

    // create a document, or fully replace an existing one
    private static void createEmployee(TransportClient client) {
        Map<String, Object> source=new HashMap<String, Object>();
        source.put("name","jack");
        source.put("age",20);
        source.put("position","technique");
        source.put("country","china");
        source.put("join_date","2017-01-01");
        source.put("salary",10000);

        IndexResponse response = client.prepareIndex("company", "employee", "1")
                .setSource(source)
                .get();
        System.out.println(response.getResult());
    }

}
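
One prepareIndex call per document means one network round trip per document. For larger batches, the bulk API groups many operations into a single request; below is a minimal sketch that could be added to the class above (the method name indexEmployeesInBulk is just illustrative, and it needs two extra imports: org.elasticsearch.action.bulk.BulkRequestBuilder and org.elasticsearch.action.bulk.BulkResponse):

    // bulk-index several documents in a single request
    private static void indexEmployeesInBulk(TransportClient client) {
        BulkRequestBuilder bulk = client.prepareBulk();
        for (int i = 2; i <= 4; i++) {
            Map<String, Object> source = new HashMap<String, Object>();
            source.put("name", "employee" + i);
            source.put("age", 20 + i);
            bulk.add(client.prepareIndex("company", "employee", String.valueOf(i)).setSource(source));
        }
        BulkResponse response = bulk.get();
        // hasFailures() is true if any individual operation in the batch failed
        System.out.println("bulk failures: " + response.hasFailures());
    }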
  • Aggregation analysis and complex search

package com.zeng;

import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.IndicesAdminClient;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg;
import org.elasticsearch.transport.client.PreBuiltTransportClient;

import java.net.InetAddress;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class EmployeeSearchTest {

    public static void main(String[] args) {
        TransportClient client=null;
        try {
            // build the client
            Settings settings= Settings.builder()
                    .put("cluster.name","elasticsearch")
                    .build();

            client=new PreBuiltTransportClient(settings);
            client.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("localhost"),9300));

            prepareDocuments(client);

            // wait for the newly indexed documents to become searchable (the default refresh interval is 1s)
            Thread.sleep(1500);

            executeSearch(client);

            executeAggr(client);

        } catch (Exception e) {
            e.printStackTrace();
        }finally {
            if(client!=null){
                client.close();
            }
        }
    }
    // aggregation: bucket by country, then by join date, and compute the average salary per bucket
    private static void executeAggr(TransportClient client) {
        SearchResponse response = client.prepareSearch("company")
                .addAggregation(
                        AggregationBuilders.terms("group_by_country").field("country")
                                .subAggregation(
                                        AggregationBuilders.dateHistogram("group_by_join_date")
                                                .field("join_date")
                                                .dateHistogramInterval(DateHistogramInterval.YEAR)
                                                .subAggregation(AggregationBuilders.avg("avg_by_salary").field("salary"))
                                )
                ).setSize(0).get();
        System.out.println("聚合json:"+response.toString());
        Map<String, Aggregation> stringAggregationMap = response.getAggregations().asMap();
        StringTerms group_by_country= (StringTerms) stringAggregationMap.get("group_by_country");

        System.out.println("按country分组有"+group_by_country.getBuckets().size()+"个国家,");
        List<Terms.Bucket> buckets = group_by_country.getBuckets();
        for(Terms.Bucket bucket:buckets){

            System.out.print(bucket.getKeyAsString() + " ");
            Histogram group_by_join_date = (InternalDateHistogram) bucket.getAggregations().asMap().get("group_by_join_date");
            List<Histogram.Bucket> bucketsList = group_by_join_date.getBuckets();
            System.out.println("buckets by join date:");
            for (Histogram.Bucket bucket1:bucketsList){
                InternalAvg avg_by_salary = bucket1.getAggregations().get("avg_by_salary");
                long docCount = bucket1.getDocCount();
                if (docCount > 0) {
                    System.out.println("\t" + bucket1.getKeyAsString() + " (" + docCount + " docs), average salary: " + avg_by_salary.getValue());
                }
            }
            System.out.println();
        }

    }

    // search: match documents whose position is "technique", post-filter age between 10 and 50, with pagination
    private static void executeSearch(TransportClient client) {
        SearchResponse response = client.prepareSearch("company")
                .setTypes("employee")
                .setQuery(QueryBuilders.matchQuery("position", "technique"))
                .setPostFilter(QueryBuilders.rangeQuery("age").from(10).to(50))
                .setFrom(0).setSize(3)
                .get();
        SearchHit[] hits = response.getHits().getHits();
        System.out.println("获取到的结果数:"+hits.length);
        for (int i = 0; i < hits.length; i++) {
            SearchHit hit = hits[i];
            System.out.println(hit.getSourceAsString());
        }
    }

    private static String[] names=new String[]{"jim","sam","jack","jan","hellen","tom","stewen","engle","gordon","ena"};
    // prepare data: index several documents to search against
    private static void prepareDocuments(TransportClient client) {
        try {
            DeleteIndexResponse response = client.admin().indices().prepareDelete("company").execute().actionGet();
            System.out.println("删除索引库结果:"+response.isAcknowledged());
        }catch (Exception e){
            System.out.println("删除索引库结果:"+e.getMessage());
        }

        try {
            CreateIndexRequestBuilder cib=client.admin().indices().prepareCreate("company");
//            XContentBuilder source = XContentFactory.jsonBuilder()
//                    .startObject()
//                        .startObject("properties") // define custom fields
//                            .startObject("country")
//                                .field("type","text")
//                                .field("fielddata",true) // enable fielddata on this text field
//                            .endObject()
//                        .endObject()
//                    .endObject();

            // text fields used in aggregations must have fielddata enabled
            Map<String, Object> countryMap=new HashMap<String, Object>(2);
            countryMap.put("type","text");
            countryMap.put("fielddata",true);

            Map<String, Object> propertiesMap=new HashMap<String, Object>(1);
            propertiesMap.put("country",countryMap);

            Map<String, Object> source=new HashMap<String, Object>(1);
            source.put("properties",propertiesMap);


            cib.addMapping("employee", source);

            CreateIndexResponse res=cib.execute().actionGet();

            System.out.println("添加映射成功:"+res.isAcknowledged());
        }catch (Exception e){
            System.out.println("添加映射成功结果:"+e.getMessage());
            System.exit(1);
        }

        for (int i = 0; i < 10; i++) {
            Map<String, Object> source=new HashMap<String, Object>();
            source.put("id",i+1);
            source.put("name",names[i]);
            source.put("age",(i+1)*5);
            source.put("position","technique");
            source.put("country",i%2==0?"china":"usa");
            source.put("join_date","200"+(i%5)+"-01-01");
            source.put("salary",i%2==0?10000+i:6000+i);

            IndexResponse response1 = client.prepareIndex("company", "employee", i+"")
                    .setSource(source)
                    .get();
            System.out.println(response1.getResult());
        }
    }
}
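
The search in executeSearch filters and paginates but leaves hits in relevance order. Sorting plugs into the same builder; below is a minimal sketch under the same index and mapping assumptions (the method name executeSortedSearch is just illustrative, and it needs an org.elasticsearch.search.sort.SortOrder import):

    // search all employees, sorted by salary, highest first
    private static void executeSortedSearch(TransportClient client) {
        SearchResponse response = client.prepareSearch("company")
                .setTypes("employee")
                .setQuery(QueryBuilders.matchAllQuery())
                .addSort("salary", SortOrder.DESC) // numeric fields are sortable by default
                .setSize(5)
                .get();
        for (SearchHit hit : response.getHits().getHits()) {
            System.out.println(hit.getSourceAsString());
        }
    }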

Source code: https://pan.baidu.com/s/1QyfQYAhvxDI5CtMuS5nm5Q
