Spring Boot 2.1.1.RELEASE Integration with Elasticsearch 6.5.4

Before you start, set up an Elasticsearch 6.5.4 cluster yourself and configure the IK analyzer.
IK analyzer installation:
https://github.com/medcl/elasticsearch-analysis-ik

Pinyin analyzer:
https://github.com/medcl/elasticsearch-analysis-pinyin



Dependencies in pom.xml:
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
    <version>2.1.1.RELEASE</version>
</dependency>
<dependency>
    <groupId>org.projectlombok</groupId>
    <artifactId>lombok</artifactId>
    <version>1.18.4</version>
</dependency>
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-test</artifactId>
    <version>2.1.1.RELEASE</version>
</dependency>
<!-- Exclude the transitive elasticsearch dependency here: the managed version is too old and causes a version conflict -->
<dependency>
    <groupId>org.elasticsearch.client</groupId>
    <artifactId>transport</artifactId>
    <exclusions>
        <exclusion>
            <groupId>org.elasticsearch</groupId>
            <artifactId>elasticsearch</artifactId>
        </exclusion>
    </exclusions>
    <version>6.5.4</version>
</dependency>
<!-- Add the elasticsearch dependency explicitly with a matching version -->
<dependency>
    <groupId>org.elasticsearch</groupId>
    <artifactId>elasticsearch</artifactId>
    <version>6.5.4</version>
</dependency>
<dependency>
    <groupId>javax.validation</groupId>
    <artifactId>validation-api</artifactId>
    <version>2.0.1.Final</version>
</dependency>
First, add a configuration class that builds the TransportClient instance:

import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.net.InetAddress;
import java.net.UnknownHostException;

@Configuration
public class MyConfig {

    @Bean
    public TransportClient client() throws UnknownHostException {
        // cluster.name must match the cluster name configured in elasticsearch.yml
        Settings settings = Settings.builder().put("cluster.name", "ydf").build();

        // The transport client connects to the TCP transport port (9300), not the HTTP port (9200)
        TransportClient client = new PreBuiltTransportClient(settings);
        client.addTransportAddress(new TransportAddress(InetAddress.getByName("192.168.42.128"), 9300));

        return client;
    }
}
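
The cluster name and node address are hard-coded above. As a minimal sketch, the same class can read them from application.properties instead; the property keys es.cluster.name, es.host and es.port are assumptions introduced here for illustration, not part of the original setup:

@Configuration
public class MyConfig {

    // Assumed entries in application.properties (illustrative values):
    // es.cluster.name=ydf
    // es.host=192.168.42.128
    // es.port=9300
    // @Value comes from org.springframework.beans.factory.annotation.Value
    @Value("${es.cluster.name}")
    private String clusterName;

    @Value("${es.host}")
    private String host;

    @Value("${es.port}")
    private int port;

    @Bean
    public TransportClient client() throws UnknownHostException {
        Settings settings = Settings.builder().put("cluster.name", clusterName).build();
        TransportClient client = new PreBuiltTransportClient(settings);
        client.addTransportAddress(new TransportAddress(InetAddress.getByName(host), port));
        return client;
    }
}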
TransportClient test class:

@Slf4j
@RunWith(SpringRunner.class)
@SpringBootTest
public class TransportClientTest {

    @Autowired
    private TransportClient transportClient;

    //1. Create the index
    //2. Create the mapping
    //3. Insert documents
    //4. Query documents

    /**
     * ik_max_word: splits the text at the finest granularity. For example, "中华人民共和国国歌" is broken into
     * "中华人民共和国, 中华人民, 中华, 华人, 人民共和国, 人民, 人, 民, 共和国, 共和, 和, 国国, 国歌",
     * exhausting every possible combination; suited to Term queries.
     * <p>
     * ik_smart: splits the text at the coarsest granularity. For example, "中华人民共和国国歌" is broken into
     * "中华人民共和国, 国歌"; suited to Phrase queries.
     *
     * @throws IOException
     */
    @Test
    public void createIndexAndMapping() throws IOException {
        XContentBuilder mapping = XContentFactory.jsonBuilder()
                .startObject()
                .startObject("properties")
                .startObject("content")
                .field("type", "text")
                .field("analyzer", "ik_max_word")
                .field("search_analyzer", "ik_smart")
                .endObject()
                .endObject()
                .endObject();
        IndicesExistsResponse exists = transportClient.admin().indices().exists(new IndicesExistsRequest("index")).actionGet();
        if (exists.isExists()) {
            log.info(" 索引名称:index已存在。。。");
            AcknowledgedResponse delete = transportClient.admin().indices().prepareDelete("index").execute().actionGet();
            log.info(" 索引名称:index删除状态:{}", delete.isAcknowledged());
        }
        CreateIndexResponse index = transportClient.admin().indices().prepareCreate("index").addMapping("_doc", mapping).execute().actionGet();
        log.info(" 索引名称:{}已创建", index.index());
    }
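
    /**
     * A quick way to see the difference described above: run the same sentence through the
     * _analyze API with both analyzers and compare the tokens. This is a minimal sketch
     * (the test name and sample text are only for illustration) and assumes the IK plugin is
     * installed on the cluster; AnalyzeResponse lives in
     * org.elasticsearch.action.admin.indices.analyze.
     */
    @Test
    public void compareIkAnalyzers() {
        for (String analyzer : new String[]{"ik_max_word", "ik_smart"}) {
            AnalyzeResponse res = transportClient.admin().indices()
                    .prepareAnalyze("中华人民共和国国歌")
                    .setAnalyzer(analyzer)
                    .get();
            // Collect the produced terms so both analyzers can be compared side by side in the log
            List<String> terms = new ArrayList<>();
            for (AnalyzeResponse.AnalyzeToken token : res.getTokens()) {
                terms.add(token.getTerm());
            }
            log.info("{} tokens: {}", analyzer, terms);
        }
    }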


    @Test
    public void addDoc() throws IOException {
        String[] contents = new String[]{"美国留给伊拉克的是个烂摊子吗",
                "公安部:各地校车将享最高路权",
                "中韩渔警冲突调查:韩警平均每天扣1艘中国渔船",
                "中国驻洛杉矶领事馆遭亚裔男子枪击 嫌犯已自首"};


        BulkRequestBuilder bulk = transportClient.prepareBulk();
        for (String content : contents) {
            XContentBuilder doc = XContentFactory.jsonBuilder()
                    .startObject()
                    .field("content", content)
                    .endObject();

            bulk.add(transportClient.prepareIndex("index", "_doc").setSource(doc));
        }
        BulkResponse res = bulk.get();
        boolean b = res.hasFailures();
        String s = res.buildFailureMessage();
        log.info("批量添加操作是否有错:{}, 错误消息:{}, 响应对象:", b, s, res.getTook());
    }
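
    /**
     * Documents written by the bulk request above only become visible to search after an index
     * refresh (which happens roughly once per second by default). A minimal sketch: trigger a
     * refresh explicitly so the query test below sees the freshly indexed documents right away.
     */
    @Test
    public void refreshIndex() {
        transportClient.admin().indices().prepareRefresh("index").get();
    }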

    @Test
    public void query() {
        SearchResponse res = transportClient
                .prepareSearch("index")
                .setTypes("_doc")
                .setQuery(new MatchQueryBuilder("content", "中国"))
                .highlighter(new HighlightBuilder().preTags("<em>").postTags("</em>").field("content"))
                .get();
        SearchHits hits = res.getHits();
        for (SearchHit hit : hits) {
            log.info("{},   {}", hit.getSourceAsString(), hit.getHighlightFields().get("content"));
        }
    }
}
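
The loop above logs the whole HighlightField object, which is why the sample output below still shows the [content], fragments[[...]] wrapper. A minimal sketch for extracting only the highlighted snippet text (HighlightField is org.elasticsearch.search.fetch.subphase.highlight.HighlightField, Text is org.elasticsearch.common.text.Text):

for (SearchHit hit : hits) {
    HighlightField field = hit.getHighlightFields().get("content");
    if (field != null) {
        // fragments() returns the highlighted snippets as Text objects; string() gives the plain text
        for (Text fragment : field.fragments()) {
            log.info("highlight: {}", fragment.string());
        }
    }
}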

 

---------- query result:

TransportClientTest   : {"content":"中国驻洛杉矶领事馆遭亚裔男子枪击 嫌犯已自首"},   [content], fragments[[<em>中国</em>驻洛杉矶领事馆遭亚裔男子枪击 嫌犯已自首]]
TransportClientTest   : {"content":"中韩渔警冲突调查:韩警平均每天扣1艘中国渔船"},   [content], fragments[[中韩渔警冲突调查:韩警平均每天扣1艘<em>中国</em>渔船]]
 
