软件地址:Installation and Upgrade Guide [7.2] | Elastic
IK分析器:https://github.com/medcl/elasticsearch-analysis-ik/releases/tag/v7.2.1
可视化⼯具kibana的安装:Kibana 7.2.0 | Elastic
smartCn分词器
linux安装:sh elasticsearch-plugin install analysis-smartcn
windows安装:elasticsearch-plugin.bat install analysis-smartcn
<!-- Elasticsearch 7.2.0 client dependencies.
     The high-level client's transitive "elasticsearch" and
     "elasticsearch-rest-client" artifacts are excluded here and re-declared
     below with an explicit 7.2.0 version, so Maven cannot resolve mismatched
     transitive versions of the three artifacts. -->
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId>
<version>7.2.0</version>
<exclusions>
<exclusion>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
</exclusion>
<exclusion>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-client</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Low-level REST client, pinned to the same version as the high-level client. -->
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-client</artifactId>
<version>7.2.0</version>
</dependency>
<!-- Core Elasticsearch library, pinned to the same version. -->
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>7.2.0</version>
</dependency>
配置类
package com.personal.website.config;

import org.apache.http.HttpHost;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Elasticsearch client configuration.
 *
 * <p>Binds {@code elasticsearch.host} and {@code elasticsearch.port} from the
 * application properties via {@link ConfigurationProperties} setter binding and
 * exposes a single shared {@link RestHighLevelClient} bean.
 */
@Configuration
@ConfigurationProperties(prefix = "elasticsearch")
public class EsConfig {

    // Bound from elasticsearch.host / elasticsearch.port through the setters
    // below. The original additionally annotated these fields with @Value,
    // binding the same properties twice; @ConfigurationProperties alone is
    // sufficient and keeps a single binding mechanism.
    private String host;
    private Integer port;

    /**
     * Creates the shared high-level REST client over plain HTTP.
     * {@code destroyMethod = "close"} lets Spring release the underlying
     * connection pool when the application context shuts down.
     */
    @Bean(destroyMethod = "close")
    public RestHighLevelClient client() {
        return new RestHighLevelClient(RestClient.builder(new HttpHost(host, port, "http")));
    }

    public String getHost() {
        return host;
    }

    public void setHost(String host) {
        this.host = host;
    }

    public Integer getPort() {
        return port;
    }

    public void setPort(Integer port) {
        this.port = port;
    }
}
数据插入到es中,先把旧数据删除,然后加入新数据
@Resource
private RestHighLevelClient client; // high-level ES client injected by the container
private static final String ARCHIVE_INDEX="archive"; // name of the ES index holding archive documents
/**
 * Re-indexes one archive document: deletes any existing ES document keyed by
 * the original file id, re-reads the stored record, overwrites its title and
 * content with the caller-supplied values, and indexes the merged document
 * under the same id.
 *
 * @param esArchiveVo carries the original file id plus the new title/content
 * @throws ServiceException when the id is empty or any ES/lookup step fails
 */
@Override
public void add(EsArchiveVo esArchiveVo) throws ServiceException {
try{
if (StringUtils.isEmpty(esArchiveVo.getOriginalFileId())){
throw new ServiceException("id不能为空");
}
// Remove the stale document first; a delete of a missing document is not an error.
DeleteRequest deleteRequest = new DeleteRequest(ARCHIVE_INDEX,esArchiveVo.getOriginalFileId());
client.delete(deleteRequest, RequestOptions.DEFAULT);
// NOTE(review): selectOne runs AFTER the delete above. If elasticSearchDao
// reads from this same ES index, the record is already gone and archiveVo
// will be null, causing an NPE on the next line — confirm its data source.
EsArchiveVo archiveVo = elasticSearchDao.selectOne(esArchiveVo.getOriginalFileId());
archiveVo.setTitle(esArchiveVo.getTitle());
archiveVo.setContent(esArchiveVo.getContent());
// Re-index the merged document under the original id.
IndexRequest request=new IndexRequest(ARCHIVE_INDEX).id(archiveVo.getOriginalFileId()).source(beanToMap(archiveVo));
IndexResponse response = client.index(request, RequestOptions.DEFAULT);
log.info("插入数据返回:"+response.toString());
}catch (Exception e){
e.printStackTrace();
throw new ServiceException(e);
}
}
全文搜索 精确查询与模糊查询一起混用
/**
 * Paged full-text search over the archive index, mixing exact (term) and
 * analyzed (match) querying with optional category filters and
 * {@code <font>}-tag highlighting.
 *
 * @param archiveCategoryId optional exact filter on archiveCategoryId; blank = no filter
 * @param subcategoryId     optional exact filter on subcategoryId; blank = no filter
 * @param page              1-based page number
 * @param limit             page size
 * @param type              1 = search title AND content, otherwise title only
 * @param isTerm            true = exact term query, false = analyzed match query
 * @param keyWord           search keyword; blank = no keyword clause
 * @return one page of matching archives with highlighted title/content and
 *         temporary file URLs resolved
 * @throws ServiceException wrapping any search or mapping failure
 */
@Override
public PageResult<EsArchiveVo> fullTextQuery(String archiveCategoryId, String subcategoryId, int page, int limit, int type, boolean isTerm, String keyWord) {
    try {
        // Highlight title always; content only when it is searched (type == 1).
        HighlightBuilder highlightBuilder = new HighlightBuilder();
        highlightBuilder.field("title");
        if (type == 1) {
            highlightBuilder.field("content");
        }
        highlightBuilder.preTags("<font color='red'>");
        highlightBuilder.postTags("</font>");
        // 0 fragments => return the whole highlighted field, not snippets.
        highlightBuilder.numOfFragments(0);

        // One query builder replaces the four near-identical branches of the
        // original (term vs match crossed with title-only vs title+content).
        BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
        if (StringUtils.isNotBlank(keyWord)) {
            if (isTerm) {
                boolQuery.must(QueryBuilders.termQuery("title", keyWord));
                if (type == 1) {
                    // Original semantics preserved: keyword must hit BOTH fields.
                    boolQuery.must(QueryBuilders.termQuery("content", keyWord));
                }
            } else {
                boolQuery.must(QueryBuilders.matchQuery("title", keyWord));
                if (type == 1) {
                    boolQuery.must(QueryBuilders.matchQuery("content", keyWord));
                }
            }
        }
        if (StringUtils.isNotBlank(archiveCategoryId)) {
            boolQuery.must(QueryBuilders.termQuery("archiveCategoryId", archiveCategoryId));
        }
        if (StringUtils.isNotBlank(subcategoryId)) {
            boolQuery.must(QueryBuilders.termQuery("subcategoryId", subcategoryId));
        }

        SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
        sourceBuilder.query(boolQuery);
        sourceBuilder.from((page - 1) * limit);
        sourceBuilder.size(limit);
        sourceBuilder.highlighter(highlightBuilder);
        SearchRequest searchRequest = new SearchRequest(ARCHIVE_INDEX);
        searchRequest.source(sourceBuilder);

        SearchResponse response = client.search(searchRequest, RequestOptions.DEFAULT);
        SearchHit[] hits = response.getHits().getHits();
        long count = response.getHits().getTotalHits().value;

        LinkedList<EsArchiveVo> archiveList = new LinkedList<>();
        for (SearchHit hit : hits) {
            EsArchiveVo vo = JSONObject.parseObject(hit.getSourceAsString(), EsArchiveVo.class);
            // Replace title/content with their highlighted variants when present.
            Map<String, HighlightField> highlightFields = hit.getHighlightFields();
            String highlightedTitle = joinHighlightFragments(highlightFields.get("title"));
            if (highlightedTitle != null) {
                vo.setTitle(highlightedTitle);
            }
            String highlightedContent = joinHighlightFragments(highlightFields.get("content"));
            if (highlightedContent != null) {
                vo.setContent(highlightedContent);
            }
            // BUGFIX: the original ran a getTempImgUrl loop over the WHOLE list
            // INSIDE this hits loop, so earlier hits had getTempImgUrl re-applied
            // once per subsequent hit. Apply it exactly once per hit instead.
            vo.setFileUrl(getTempImgUrl(vo.getFileUrl()));
            archiveList.add(vo);
        }
        return PageResult.<EsArchiveVo>builder().data(archiveList).code(0).count(count).build();
    } catch (Exception e) {
        log.error("full-text query failed", e);
        throw new ServiceException(e);
    }
}

/**
 * Concatenates a highlight field's fragments into one string.
 * Returns null when the field was not highlighted for this hit.
 */
private static String joinHighlightFragments(HighlightField field) {
    if (field == null) {
        return null;
    }
    StringBuilder sb = new StringBuilder();
    for (Text fragment : field.getFragments()) {
        sb.append(fragment);
    }
    return sb.toString();
}
新增
/**
 * Indexes (creates or overwrites) a player document under the given id.
 *
 * @param nbaPlayer the player to serialize (via beanToMap) and index
 * @param id        the document id
 * @return true when the index call completed, false on an I/O failure
 */
public boolean addPlayer(NBAPlayer nbaPlayer, String id) {
    IndexRequest request = new IndexRequest(NBA_INDEX).id(id).source(beanToMap(nbaPlayer));
    try {
        client.index(request, RequestOptions.DEFAULT);
        // BUGFIX: the original unconditionally returned false, even on success.
        return true;
    } catch (IOException e) {
        e.printStackTrace();
        return false;
    }
}
获取单个
/**
 * Fetches a single player document from the NBA index by id.
 *
 * @param id the document id
 * @return the document's source as a field map, as returned by the client
 * @throws IOException on transport failure
 */
public Map<String, Object> getPlayer(String id) throws IOException {
    GetResponse getResponse = client.get(new GetRequest(NBA_INDEX, id), RequestOptions.DEFAULT);
    return getResponse.getSource();
}
修改
/**
 * Applies a partial (doc-merge) update to the player document with the given id.
 *
 * @param player source of the fields to merge into the stored document
 * @param id     the document id
 * @return always true; failures surface as IOException
 * @throws IOException on transport failure
 */
public boolean updatePlayer(NBAPlayer player, String id) throws IOException {
    Map<String, Object> doc = beanToMap(player);
    UpdateRequest updateRequest = new UpdateRequest(NBA_INDEX, id).doc(doc);
    UpdateResponse updateResponse = client.update(updateRequest, RequestOptions.DEFAULT);
    System.out.println(JSONObject.toJSON(updateResponse));
    return true;
}
删除
/**
 * Deletes the player document with the given id from the NBA index.
 *
 * @param id the document id
 * @return true when the delete call completed, false on an I/O failure
 */
public boolean deletePlayer(String id) {
    DeleteRequest request = new DeleteRequest(NBA_INDEX, id);
    try {
        DeleteResponse delete = client.delete(request, RequestOptions.DEFAULT);
        System.out.println(JSONObject.toJSON(delete));
        // BUGFIX: the original unconditionally returned false, even on success.
        return true;
    } catch (IOException e) {
        e.printStackTrace();
        return false;
    }
}
全部删除
/**
 * Deletes every document in the NBA index via a match-all delete-by-query.
 *
 * @return true when the call completed, false on an I/O failure
 */
public boolean deleteAllPlayer() {
    DeleteByQueryRequest request = new DeleteByQueryRequest(NBA_INDEX);
    try {
        client.deleteByQuery(request, RequestOptions.DEFAULT);
        // BUGFIX: the original unconditionally returned false, even on success.
        return true;
    } catch (IOException e) {
        e.printStackTrace();
        return false;
    }
}
工具类
/**
 * Copies a bean's non-null properties into a map keyed by property name.
 * A null bean yields an empty map.
 *
 * @param bean the source bean (may be null)
 * @return a new mutable map of property name to non-null property value
 */
public static <T> Map<String, Object> beanToMap(T bean) {
    Map<String, Object> result = new HashMap<>();
    if (bean == null) {
        return result;
    }
    BeanMap beanMap = BeanMap.create(bean);
    for (Object key : beanMap.keySet()) {
        Object value = beanMap.get(key);
        if (value != null) {
            result.put(String.valueOf(key), value);
        }
    }
    return result;
}
批量修改
// Batch update: push the archive item's latest field values into every ES
// document whose originalFileId belongs to this archive item.
List<OriginalFile> originalFileList = originalFileDao.selectList(new QueryWrapper<OriginalFile>().eq("archive_item_id", archiveItem.getId()));
if (originalFileList!=null && originalFileList.size()>0){
List<String> idList = originalFileList.stream().map(OriginalFile::getId).collect(Collectors.toList());
// NOTE(review): presumably the field map to write into each matched document —
// confirm against elasticSearchDao.selectOneByArchiveItemId.
HashMap<String, Object> map = elasticSearchDao.selectOneByArchiveItemId(archiveItem.getId());
StringBuilder sb = new StringBuilder();
// Build a painless script "ctx._source.<field>=params.<field>;" per field;
// the same map is passed as the script's params below.
map.keySet().stream().forEach(p -> {
sb.append("ctx._source.").append(p).append("=params.").append(p).append(";");
});
UpdateByQueryRequest request = new UpdateByQueryRequest(ARCHIVE_INDEX);
// Match all documents whose originalFileId is in the collected id list.
request.setQuery(new TermsQueryBuilder("originalFileId",idList));
Script script = new Script(ScriptType.INLINE, "painless", sb.toString(), map);
request.setScript(script);
BulkByScrollResponse bulkByScrollResponse = client.updateByQuery(request, RequestOptions.DEFAULT);
log.info("查询更新索引:[{}]操作共耗时:[{}],共修改文档数:[{}]",
"indexName", bulkByScrollResponse.getTook(), bulkByScrollResponse.getUpdated());
}
批量删除
// Batch delete: remove every ES document whose originalFileId belongs to the
// archive item identified by `id`.
List<OriginalFile> originalFileList = originalFileDao.selectList(new QueryWrapper<OriginalFile>().eq("archive_item_id", id));
if (originalFileList!=null && originalFileList.size()>0){
List<String> idList = originalFileList.stream().map(OriginalFile::getId).collect(Collectors.toList());
DeleteByQueryRequest request = new DeleteByQueryRequest(ARCHIVE_INDEX);
// Match all documents whose originalFileId is in the collected id list.
request.setQuery(new TermsQueryBuilder("originalFileId",idList));
client.deleteByQuery(request, RequestOptions.DEFAULT);
}
elasticSearch启动失败,添加新用户启动
2774 28/06/23 11:07:28 useradd chenyu
2775 28/06/23 11:10:35 chown chenyu elasticsearch-7.2.1 -R
配置文件:
http.port: 9210
xpack.ml.enabled: false