ElasticsearchRepository条件查询+分页+排序+字段聚合

环境搭建这里就不介绍,只贴上关键的代码供大家参考!

	<dependency>
		<groupId>org.springframework.boot</groupId>
		<artifactId>spring-boot-starter-data-elasticsearch</artifactId>
	</dependency>
涉及中英文,这里只贴出部分
package cn.sciencedb.common.domain.es;

import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;

import java.io.Serializable;
import java.util.Date;

/**
 * Elasticsearch index document for a scientific dataset (English index).
 *
 * Text fields use the IK analyzer ("ik_max_word") for both indexing and search.
 *
 * @author 张益达
 * @date 2018-08-13
 */

@Getter
@Setter
@ToString
@Document(indexName = "dataset_index_en", type = "dataset")
public class DataSetIndexEn implements Serializable{

    private static final long serialVersionUID = 1L;

    @Id
    private String id;// ES document id (primary key)

    @Field(type = FieldType.Text, analyzer = "ik_max_word", searchAnalyzer = "ik_max_word")
    private String title;// dataset title

    @Field(type = FieldType.Text, analyzer = "ik_max_word", searchAnalyzer = "ik_max_word")
    private String introduction;// dataset introduction / abstract

    @Field(type = FieldType.Text, analyzer = "ik_max_word", searchAnalyzer = "ik_max_word")
    private String keyword;// keywords

    private String author;// dataset author

    private String dataSetType;// dataset type

    private Date publishDate;// dataset publication date

    private String dataSetId; // dataset id

    private String code;    // dataset code, maps to projectId

    private String taxonomy;    // subject / discipline classification

    private String pictureUrl; // picture URL

    private long viewCount; // view count
}

controller

/**
     * Popular ("hot") datasets, paged.
     *
     * @param page        zero-based page index as a string; defaults to 0, malformed input falls back to 0
     * @param size        page size as a string; defaults to 10, malformed or non-positive input falls back to 10
     * @param code        dataset code, maps to projectId (optional filter)
     * @param dataSetType dataset type (optional filter)
     * @param locale      locale used to choose the zh/en index
     * @return search-service result wrapper
     */
    @ApiOperation("热门数据")
    @GetMapping("/recommend2")
    public SdbResult recommend2(@RequestParam(name = "page", required = false, defaultValue = "0") String page,
                                @RequestParam(name = "size", required = false, defaultValue = "10") String size,
                                @RequestParam(name = "code", required = false) String code,
                                @RequestParam(name = "dataSetType", required = false) String dataSetType,
                                Locale locale) {
        // Parse paging parameters defensively: a malformed query value must not
        // surface as an unhandled NumberFormatException (HTTP 500).
        int pageNum = parseIntOrDefault(page, 0);
        int pageSize = parseIntOrDefault(size, 10);
        // PageRequest.of rejects negative page and non-positive size — clamp instead of failing.
        Pageable pageable = PageRequest.of(Math.max(pageNum, 0), pageSize > 0 ? pageSize : 10);
        return searchService.recommendData2(code, dataSetType, locale, pageable);
    }

    /** Parses {@code value} as an int, returning {@code fallback} on null or malformed input. */
    private static int parseIntOrDefault(String value, int fallback) {
        try {
            return Integer.parseInt(value);
        } catch (NumberFormatException ignored) {
            // Deliberate best-effort: fall back to the documented default.
            return fallback;
        }
    }

这里是重点!!!
serviceimpl

	/**
     * Full-text search with a terms aggregation on taxonomy (discipline) and a
     * nested sub-aggregation on year.
     *
     * @param q              free-text query, matched against title/keyword/introduction
     * @param dataSetType    exact dataset-type filter (optional)
     * @param taxonomy       exact taxonomy filter (optional)
     * @param code           dataset code filter (optional)
     * @param username       unused here; kept for interface compatibility
     * @param ordertime      when true, sort by publishDate desc
     * @param orderinfluence when true, sort by referenceNumber desc (overrides ordertime)
     * @param page           zero-based page index (must be numeric)
     * @param size           page size (must be numeric)
     * @param locale         "en" selects the English index, anything else the Chinese one
     * @return result map with total, totalPages, recommendData plus one entry per aggregation bucket
     */
    @Override
    public SdbResult searchQuery(String q, String dataSetType, String taxonomy, String code, String username, boolean ordertime, boolean orderinfluence, String page, String size, Locale locale) {

        Map<String, Object> map = Maps.newHashMap();
        Pageable pageable = PageRequest.of(Integer.parseInt(page), Integer.parseInt(size));
        // Filter conditions
        BoolQueryBuilder bqb = QueryBuilders.boolQuery();
        if(StringUtils.isNotEmpty(dataSetType))
            bqb.must(QueryBuilders.matchPhraseQuery("dataSetType", dataSetType));
        if(StringUtils.isNotEmpty(code))
            bqb.must(QueryBuilders.matchPhraseQuery("code", code));
        if(StringUtils.isNotEmpty(taxonomy))
            bqb.must(QueryBuilders.matchPhraseQuery("taxonomy", taxonomy));
        if(StringUtils.isNotEmpty(q))
            bqb.must(QueryBuilders.multiMatchQuery(q, "title", "keyword", "introduction"));
        // Sort conditions (orderinfluence wins when both flags are set)
        FieldSortBuilder fsb = null;
        if(ordertime){
            fsb = SortBuilders.fieldSort("publishDate").order(SortOrder.DESC);
        }
        if(orderinfluence){
            fsb = SortBuilders.fieldSort("referenceNumber").order(SortOrder.DESC);
        }
        // Aggregation: taxonomy buckets, each carrying a nested year sub-aggregation.
        TermsAggregationBuilder builder = AggregationBuilders.terms("taxonomy").field("taxonomy.keyword")
                .subAggregation(AggregationBuilders.terms("year").field("year.keyword"));
        // Build the query. Only attach a sort when one was actually requested:
        // the original unconditionally called withSort(fsb), which passed null
        // when neither sort flag was set.
        NativeSearchQueryBuilder nsqb = new NativeSearchQueryBuilder()
                .withQuery(bqb)
                .addAggregation(builder)
                .withPageable(pageable);
        if (fsb != null) {
            nsqb.withSort(fsb);
        }
        SearchQuery query = nsqb.build();
        if ("en".equals(locale.getLanguage())) {
            AggregatedPage<DataSetIndexEn> search = (AggregatedPage)sdoIndexEnRepository.search(query);
            fillAggregatedResult(search, map);
        } else {
            AggregatedPage<DataSetIndexZh> search = (AggregatedPage)sdoIndexZhRepository.search(query);
            fillAggregatedResult(search, map);
        }
        return success(map);
    }

    /**
     * Copies paging info and the taxonomy/year aggregation buckets from the
     * search result into the response map (bucket key -> doc count).
     * Extracted from the formerly duplicated en/zh branches.
     */
    private void fillAggregatedResult(AggregatedPage<?> search, Map<String, Object> map) {
        Terms taxonomyTerms = (Terms)search.getAggregations().getAsMap().get("taxonomy");
        log.debug("term1============" + taxonomyTerms.toString());
        for (Terms.Bucket bucket : taxonomyTerms.getBuckets()) {
            log.debug("一级内容" + bucket.toString());
            map.put(bucket.getKey().toString(), bucket.getDocCount());
            Terms yearTerms = bucket.getAggregations().get("year");
            for (Terms.Bucket yearBucket : yearTerms.getBuckets()) {
                log.debug("二级内容" + yearBucket.toString());
                map.put(yearBucket.getKey().toString(), yearBucket.getDocCount());
            }
        }
        map.put("total", search.getTotalElements());
        map.put("totalPages", search.getTotalPages());
        map.put("recommendData", search.getContent());
    }
// Detects whether the query term contains CJK/full-width characters, used to
// route the search to the Chinese or English index.
// NOTE(review): \u0391-\uFFE5 also matches Greek letters and other full-width
// ranges, not strictly Chinese — presumably acceptable here; confirm with callers.
private static Pattern compile = Pattern.compile("[\\u0391-\\uFFE5]");
    /**
     * Full-text search with field highlighting.
     *
     * Index routing: a query containing CJK/full-width characters — or an empty
     * query under a non-"en" locale — hits the Chinese index; otherwise the
     * English one.
     *
     * @param q              search terms, phrase-matched against title/author/keyword/introduction
     * @param dataSetType    dataset type; a comma-separated value becomes a terms filter
     * @param taxonomy       discipline; a comma-separated value becomes a terms filter
     * @param code           dataset code filter (optional)
     * @param locale         locale used when q is empty to choose the index
     * @param username       unused here; kept for interface compatibility
     * @param rank           when set, restrict to datasets published within the last {@code rank} days
     * @param ordertime      when false sort publishDate asc, when true desc (lowest priority)
     * @param orderinfluence sort by referenceNumber desc (highest priority)
     * @param view           sort by clicks desc (second priority)
     * @param pageable       paging parameters
     * @return SdbResult carrying total, totalPages and the highlighted page content
     */
    @Override
    public SdbResult search(String q, String dataSetType, String taxonomy, String code, Locale locale, String username,
        Long rank, boolean ordertime, boolean orderinfluence, boolean view, Pageable pageable) {
        Map<String, Object> map = Maps.newHashMap();
        String language = locale.getLanguage();
        // Build the query conditions.
        BoolQueryBuilder builder = QueryBuilders.boolQuery();
        if (StringUtils.isNotEmpty(code))
            builder.must(QueryBuilders.matchPhraseQuery("code", code));
        if (StringUtils.isNotEmpty(q)) {
            builder.should(QueryBuilders.matchPhraseQuery("title", q))
                .should(QueryBuilders.matchPhraseQuery("author", q))
                .should(QueryBuilders.matchPhraseQuery("keyword", q))
                .should(QueryBuilders.matchPhraseQuery("introduction", q));
        }
        if (StringUtils.isNotEmpty(dataSetType)) {
            if (dataSetType.contains(",")) {
                builder.should(QueryBuilders.termsQuery("dataSetType", dataSetType.split(",")));
            } else {
                builder.must(QueryBuilders.matchPhraseQuery("dataSetType", dataSetType));
            }
        }
        if (StringUtils.isNotEmpty(taxonomy)) {
            if (taxonomy.contains(",")) {
                // BUGFIX: the original split dataSetType here instead of taxonomy,
                // filtering the wrong field (and NPE-ing when dataSetType was null).
                builder.should(QueryBuilders.termsQuery("taxonomy", taxonomy.split(",")));
            } else {
                builder.must(QueryBuilders.matchPhraseQuery("taxonomy", taxonomy));
            }
        }
        if (null != rank) {
            // Restrict publishDate to the last `rank` days, in epoch millis.
            long now = System.currentTimeMillis();
            long from = now - 24L * 3600 * 1000 * rank;
            builder.must(QueryBuilders.rangeQuery("publishDate").from(from).to(now));
        }
        // Sort priority: influence > views > publish date (asc unless ordertime).
        FieldSortBuilder fsb;
        if (orderinfluence) {
            fsb = SortBuilders.fieldSort("referenceNumber").order(SortOrder.DESC);
        } else if (view) {
            fsb = SortBuilders.fieldSort("clicks").order(SortOrder.DESC);
        } else if (!ordertime) {
            fsb = SortBuilders.fieldSort("publishDate").order(SortOrder.ASC);
        } else {
            fsb = SortBuilders.fieldSort("publishDate").order(SortOrder.DESC);
        }
        // Highlighting with the default highlighter.
        String[] fields = {"title", "author", "keyword", "introduction"};
        String preTags = "<span style='background-color:yellow'>";
        String postTags = "</span>";
        // Build the search query.
        SearchQuery queryBuilder =
            new NativeSearchQueryBuilder().withQuery(builder).withPageable(pageable).withSort(fsb)
                .withHighlightFields(new HighlightBuilder.Field(fields[0]).preTags(preTags).postTags(postTags),
                    new HighlightBuilder.Field(fields[1]).preTags(preTags).postTags(postTags),
                    new HighlightBuilder.Field(fields[2]).preTags(preTags).postTags(postTags),
                    new HighlightBuilder.Field(fields[3]).preTags(preTags).postTags(postTags))
                .build();
        // Decide which index to hit (see class-level `compile` pattern).
        boolean chinese = true;
        if (StringUtils.isEmpty(q)) {
            if ("en".equals(language)) {
                chinese = false;
            }
        } else {
            chinese = compile.matcher(q).find();
        }
        if (!chinese) {
            Page<SdbIndexEn> search =
                elasticsearchTemplate.queryForPage(queryBuilder, SdbIndexEn.class, new SearchResultMapper() {
                    @Override
                    public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz,
                        Pageable pageable) {
                        ArrayList<SdbIndexEn> sdbs = new ArrayList<SdbIndexEn>();
                        for (SearchHit searchHit : response.getHits()) {
                            SdbIndexEn sdb = new SdbIndexEn();
                            sdb.setId(String.valueOf(searchHit.getId()));
                            // Prefer the highlighted fragment; fall back to the raw source value.
                            sdb.setTitle(highlightOrSource(searchHit, fields[0]));
                            sdb.setAuthor(highlightOrSource(searchHit, fields[1]));
                            sdb.setKeyword(highlightOrSource(searchHit, fields[2]));
                            sdb.setIntroduction(highlightOrSource(searchHit, fields[3]));
                            copyDataEn(searchHit, sdb);
                            sdbs.add(sdb);
                        }
                        return new AggregatedPageImpl<T>((List<T>)sdbs, pageable, response.getHits().getTotalHits());
                    }

                    @Override
                    public <T> T mapSearchHit(SearchHit searchHit, Class<T> aClass) {
                        return null;
                    }
                });
            map.put("total", search.getTotalElements());
            map.put("totalPages", search.getTotalPages());
            map.put("recommendData", search.getContent());
        } else {
            Page<SdbIndexZh> search =
                elasticsearchTemplate.queryForPage(queryBuilder, SdbIndexZh.class, new SearchResultMapper() {

                    @Override
                    public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz,
                        Pageable pageable) {
                        ArrayList<SdbIndexZh> sdbs = new ArrayList<SdbIndexZh>();
                        for (SearchHit searchHit : response.getHits()) {
                            SdbIndexZh sdb = new SdbIndexZh();
                            sdb.setId(String.valueOf(searchHit.getId()));
                            // Prefer the highlighted fragment; fall back to the raw source value.
                            sdb.setTitle(highlightOrSource(searchHit, fields[0]));
                            sdb.setAuthor(highlightOrSource(searchHit, fields[1]));
                            sdb.setKeyword(highlightOrSource(searchHit, fields[2]));
                            sdb.setIntroduction(highlightOrSource(searchHit, fields[3]));
                            copyData(searchHit, sdb);
                            sdbs.add(sdb);
                        }
                        return new AggregatedPageImpl<T>((List<T>)sdbs, pageable, response.getHits().getTotalHits());
                    }

                    @Override
                    public <T> T mapSearchHit(SearchHit searchHit, Class<T> aClass) {
                        return null;
                    }
                });
            map.put("total", search.getTotalElements());
            map.put("totalPages", search.getTotalPages());
            map.put("recommendData", search.getContent());
        }
        return success(map);
    }

    /**
     * Returns the first highlighted fragment for {@code field}, or the plain
     * value from the hit source when the field was not highlighted.
     * Extracted from eight near-identical if/else blocks; additionally guards
     * against an empty fragment array, which the original would have indexed
     * out of bounds.
     */
    private static String highlightOrSource(SearchHit hit, String field) {
        HighlightField highlight = hit.getHighlightFields().get(field);
        if (highlight != null && highlight.getFragments().length > 0) {
            return highlight.getFragments()[0].toString();
        }
        return (String)hit.getSourceAsMap().get(field);
    }

Repository

/**
 * 中文数据Repository
 * 
 * @author 张益达
 * @date 2018年8月23日
 */
@Repository
public interface DataSetZhRepository extends ElasticsearchRepository<DataSetIndexZh, String> {

    List<DataSetIndexZh> findByDataSetType(String dataSetType, Pageable pageable);

    List<DataSetIndexZh> findByCode(String code,Pageable pageable);

    List<DataSetIndexZh> findByCodeAndDataSetType(String code, String dataSetType, Pageable pageable);

    DataSetIndexZhfindByDataSetId(String dataSetId);
}

聚合结果:
（此处原文为聚合结果的截图，抓取时图片未能保留。）

  • 12
    点赞
  • 51
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 20
    评论
评论 20
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

张益达·

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值