ElasticSearch
kibana测试 ik分词插件
ik_max_word 最细粒度划分!穷尽词库的可能!字典
划分我喜欢胡黎勇
会发现 胡 黎 勇 不是一个词 这种情况就要加到我们的分词器的字典里面
新建一个 dic 文件 然后在里面添加自己的关键词 然后把 huliyong.dic 在 xml 里面配置导入
增加一个索引
创建一个索引 put /索引名/~类型名/文档id
put
PUT /test1/type1/1
{
"name": "hu",
"age":3
}
基本类型
查询
更新
以前直接put 进行覆盖
但是如果覆盖中 漏掉了某个元素 那么这个元素就会丢失
现在:post 进行修改
post /test/_doc/1/_update
{
"doc":{
"name":"uhuhuhu"
}
}
删除
delete /test1
文档的基本操作
PUT /huliyong/user/1
{
"name":"huliyong",
"age": 12,
"desc":"天选之子",
"tags":["大学生","温暖","神奇"]
}
PUT /huliyong/user/2
{
"name":"李四",
"age": 12,
"desc":"大渣男",
"tags":["sd","温暖","神奇"]
}
PUT /huliyong/user/3
{
"name":"王五222",
"age": 22,
"desc":"不知道是谁",
"tags":["sd","高","dsb"]
}
根据id 查询
GET /huliyong/user/3
POST huliyong/user/3
{
"name":"王五333",
"age": 22,
"desc":"不知道是谁",
"tags":["sd","高","dsb"]
}
POST huliyong/user/3/_update
{
"doc":{
"name":"66666"
}
}
根据id 查询
GET /huliyong/user/3
带条件的
GET huliyong/user/_search?q=name:李四
!
复杂查询(模糊,)
hits
查询索引和文档的信息
查询到的总数
然后是查询出来的具体文档
数据中的东西就可以遍历出来
结果过滤
显示自己需要的数据
用java 操作es 所有的方法对象就是里面的key
排序 sort
分页
from :从第几条开始 默认从0开始
size: 返回多少数据(单页面)
bool 多条件精确查询
must 相当于and 所有的条件都要符合
should 相当于or
must_not 相当于not
过滤器
filter 对结果进行过滤 比如这里的range 限制查询的age 范围
gt >
gte>=
lt<
lte<=
匹配多个条件
多个条件 空格进行隔开 只要满足一个就可以被查出
精确查询
term 查询 是直接通过指定的词条进行精确查询
关于分词
直接查找精确的
match:会使用分词器解析 (先分析文档 然后通过分析的文档进行查询)
两个类型:text
keyword 不会被解析
高亮查询
自定义高亮条件
- 匹配
- 按条件匹配
- 精确匹配
- 区间范围匹配
- 匹配字段过滤
- 多条件查询
- 高亮查询
集成SpringBoot
<!-- Elasticsearch high-level REST client; version must match the local ES server.
     Fixed: closing tags may not contain a space after "</", and the artifactId
     must not contain spaces. -->
<dependency>
    <groupId>org.elasticsearch.client</groupId>
    <artifactId>elasticsearch-rest-high-level-client</artifactId>
    <version>7.6.2</version>
</dependency>
初始化
RestHighLevelClient client = new RestHighLevelClient(
RestClient.builder(
new HttpHost("localhost", 9200, "http"),
new HttpHost("localhost", 9201, "http")))
client.close();
要保证导入的依赖与本地的版本一致
package com.huliyong.config;
import org.apache.http.HttpHost;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Spring configuration exposing a {@link RestHighLevelClient} bean that talks
 * to the local Elasticsearch nodes on ports 9200 and 9201.
 *
 * <p>The client library version must match the Elasticsearch server version.
 *
 * @author Administrator
 */
@Configuration
public class ElasticSearchClientConfig {

    /**
     * Creates the high-level REST client bean.
     * (Removed the unused locals {@code restClientBuilder} and {@code hosts}
     * that were never assigned or read.)
     */
    @Bean
    public RestHighLevelClient restHighLevelClient() {
        return new RestHighLevelClient(
                RestClient.builder(
                        new HttpHost("127.0.0.1", 9200, "http"),
                        new HttpHost("127.0.0.1", 9201, "http")));
    }
}
创建索引
@SpringBootTest
class EsApiApplicationTests {

    // High-level client injected from ElasticSearchClientConfig.
    @Qualifier("restHighLevelClient")
    @Autowired
    private RestHighLevelClient client;

    /**
     * Creates an index named "hu" (equivalent to {@code PUT /hu}).
     * Fixed: the response was ignored; print whether the cluster acknowledged
     * the request so the test gives visible feedback.
     */
    @Test
    void contextLoads() throws IOException {
        // 1. Build the create-index request
        CreateIndexRequest request = new CreateIndexRequest("hu");
        // 2. Execute it through the indices() client and inspect the response
        CreateIndexResponse response = client.indices().create(request, RequestOptions.DEFAULT);
        System.out.println(response.isAcknowledged());
    }
}
获取索引
// Probe whether the "huliyong" index exists (HEAD-style check, no body fetched).
@Test
void huo() throws IOException {
    GetIndexRequest request = new GetIndexRequest("huliyong");
    boolean indexExists = client.indices().exists(request, RequestOptions.DEFAULT);
    System.out.println(indexExists);
}
删除索引
// Delete the "hu" index and report whether the cluster acknowledged the request.
@Test
void del() throws IOException {
    AcknowledgedResponse response =
            client.indices().delete(new DeleteIndexRequest("hu"), RequestOptions.DEFAULT);
    System.out.println(response.isAcknowledged());
}
添加文档测试
// Index a single document — the REST equivalent of PUT /hu/_doc/1.
@Test
void addDocument() throws IOException {
    // The payload object to store.
    User user = new User("huliyong", 26);
    IndexRequest request = new IndexRequest("hu");
    request.id("1");
    request.timeout("10s");
    // The User object must be serialized to JSON before it can be sent.
    request.source(JSON.toJSONString(user), XContentType.JSON);
    // Send the request and print the indexing result.
    IndexResponse response = client.index(request, RequestOptions.DEFAULT);
    System.out.println(response.toString());
}
IndexResponse[index=hu,type=_doc,id=1,version=1,result=created,seqNo=0,primaryTerm=1,shards={"total":2,"successful":1,"failed":0}]
获取文档测试
// Check whether document 1 exists in index "hu" without transferring its _source.
@Test
void testget() throws IOException {
    GetRequest probe = new GetRequest("hu", "1");
    // Existence is all we need: skip the _source context and stored fields.
    probe.fetchSourceContext(new FetchSourceContext(false));
    probe.storedFields("_none_");
    boolean found = client.exists(probe, RequestOptions.DEFAULT);
    System.out.println(found);
}
获取文档信息
// Fetch document 1 from index "hu" and print its _source in several representations.
@Test
void testGet() throws IOException {
    GetResponse response = client.get(new GetRequest("hu", "1"), RequestOptions.DEFAULT);
    System.out.println(response.getSource());          // map form
    System.out.println(response.getSourceAsString());  // raw JSON string
    System.out.println(response.getSourceAsMap());     // map form again
}
更新文档信息
/**
 * Partially updates document 1 in index "hu" (POST .../_update style):
 * only the fields present in the submitted JSON are changed, unlike a
 * plain PUT which replaces the whole document.
 */
@Test
void testGetDocument() throws IOException {
    UpdateRequest updateRequest = new UpdateRequest("hu", "1");
    updateRequest.timeout("1s");
    User user = new User("hhhhh", 12);
    // Fixed: doc(...) returns the request itself — the extra unused local
    // "UpdateRequest doc = ..." has been removed.
    updateRequest.doc(JSON.toJSONString(user), XContentType.JSON);
    UpdateResponse update = client.update(updateRequest, RequestOptions.DEFAULT);
    System.out.println(update.status());
}
删除文档记录
// Delete document 1 from index "hu"; print the full response and its status.
@Test
void testGet2() throws IOException {
    DeleteRequest request = new DeleteRequest("hu", "1");
    request.timeout("1s");
    DeleteResponse response = client.delete(request, RequestOptions.DEFAULT);
    System.out.println(response);
    System.out.println(response.status());
}
批量操作
道理相同 里面的插入 可以改成修改删除。。。
插入不指定id 就会随机id
/**
 * Bulk-indexes several documents in one request. The same BulkRequest can
 * also carry update/delete actions; omitting id(...) lets ES generate one.
 */
@Test
void bulk() throws IOException {
    BulkRequest bulkRequest = new BulkRequest();
    bulkRequest.timeout("1s");

    ArrayList<User> userArrayList = new ArrayList<>();
    userArrayList.add(new User("12", 12));
    userArrayList.add(new User("13", 13));
    userArrayList.add(new User("14", 14));
    userArrayList.add(new User("15", 15));
    userArrayList.add(new User("16", 16));
    userArrayList.add(new User("17", 17));

    for (int i = 0; i < userArrayList.size(); i++) {
        bulkRequest.add(new IndexRequest("hu")
                // BUG FIX: "" + i + 1 concatenated strings, producing ids
                // "01", "11", "21", ... — the intended ids are 1..n.
                .id(String.valueOf(i + 1))
                .source(JSON.toJSONString(userArrayList.get(i)), XContentType.JSON));
    }
    BulkResponse bulk = client.bulk(bulkRequest, RequestOptions.DEFAULT);
    System.out.println(bulk.status());
}
查询
SearchRequest 搜索请求
SearchSourceBuilder 条件构造
HighlightBuilder 构建高亮
TermQueryBuilder 精确查询
MatchAllQueryBuilder 查询所有
// Exact-match (term) search against the index named by Esconst.ES_INDEX.
@Test
void query() throws IOException {
    SearchRequest request = new SearchRequest(Esconst.ES_INDEX);

    // Build the search body. QueryBuilders supplies the query clauses:
    //   QueryBuilders.termQuery(...)   -> exact-term query
    //   QueryBuilders.matchAllQuery()  -> match everything
    SearchSourceBuilder sourceBuilder = new SearchSourceBuilder()
            .query(QueryBuilders.termQuery("name", "13"))
            .timeout(new TimeValue(60, TimeUnit.SECONDS));
    request.source(sourceBuilder);

    SearchResponse response = client.search(request, RequestOptions.DEFAULT);
    System.out.println(response.getHits());
    System.out.println("==================");
    for (SearchHit hit : response.getHits().getHits()) {
        System.out.println(hit.getSortValues());
        System.out.println(hit.getSourceAsMap());
    }
}
示例Demo
后续发到gitee上面