Elasticsearch6.3.1安装IK
1. 安装解压工具 unzip（用于解压插件包）
yum install unzip
2 下载elasticsearch-analysis-ik-6.3.1.zip
https://github.com/medcl/elasticsearch-analysis-ik/releases
3 通过 SecureCRT（或 sftp/scp 等工具）将安装包上传到服务器
4. 在 elasticsearch-6.3.1/plugins/ 目录下新建文件夹
mkdir ik
5 解压在相应位置
unzip elasticsearch-analysis-ik-6.3.1.zip -d elasticsearch-6.3.1/plugins/ik/
6 重启elasticsearch
su zpark
cd /apps/elasticsearch-6.3.1/bin
./elasticsearch
7 启动kibana
[root@hdp-1 apps]# cd kibana-6.3.1-linux-x86_64
[root@hdp-1 kibana-6.3.1-linux-x86_64]# cd bin/
[root@hdp-1 bin]# ./kibana
8 上网查看喽
http://hdp-1:5601/app/kibana
在kibana下的Dev Tools写
测试IK中文分词器的基本功能
(1)ik_smart
(2)ik_max_word
(3)新词
输入
GET _analyze?pretty
{
"analyzer": "ik_max_word",
"text": "我是中国人"
}
GET _analyze?pretty
{
"analyzer": "ik_smart",
"text": "安徽省长江流域"
}
GET _analyze?pretty
{
"analyzer": "ik_smart",
"text": "王者荣耀"
}
结果
{
"tokens": [
{
"token": "我",
"start_offset": 0,
"end_offset": 1,
"type": "CN_CHAR",
"position": 0
},
{
"token": "是",
"start_offset": 1,
"end_offset": 2,
"type": "CN_CHAR",
"position": 1
},
{
"token": "中国人",
"start_offset": 2,
"end_offset": 5,
"type": "CN_WORD",
"position": 2
},
{
"token": "中国",
"start_offset": 2,
"end_offset": 4,
"type": "CN_WORD",
"position": 3
},
{
"token": "国人",
"start_offset": 3,
"end_offset": 5,
"type": "CN_WORD",
"position": 4
}
]
}
{
"tokens": [
{
"token": "安徽省",
"start_offset": 0,
"end_offset": 3,
"type": "CN_WORD",
"position": 0
},
{
"token": "长江流域",
"start_offset": 3,
"end_offset": 7,
"type": "CN_WORD",
"position": 1
}
]
}
{
"tokens": [
{
"token": "王者",
"start_offset": 0,
"end_offset": 2,
"type": "CN_WORD",
"position": 0
},
{
"token": "荣耀",
"start_offset": 2,
"end_offset": 4,
"type": "CN_WORD",
"position": 1
}
]
}
4.4 扩展字典
(1)查看已有词典
[root@hdp-1 root]$ cd apps
[root@hdp-1 apps]$ cd elasticsearch-6.3.1/
[root@hdp-1 elasticsearch-6.3.1]$ cd plugins/
[root@hdp-1 plugins]$ cd ik
[root@hdp-1 ik]$ cd config/
(2)自定义词典
在 config/ 目录下新建词典文件（注意：newword.dic 是文件而不是目录，应使用 touch 或 vi 创建，不能用 mkdir）
touch newword.dic
vi newword.dic
cat newword.dic
老铁
王者荣耀
洪荒之力
共有产权房
一带一路
(3)更新配置
在 config/ 目录下编辑配置文件
vi IKAnalyzer.cfg.xml
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE properties SYSTEM "http://java.sun.com/dtd/properties.dtd">
<properties>
<comment>IK Analyzer 扩展配置</comment>
<!--用户可以在这里配置自己的扩展字典 -->
<entry key="ext_dict">newword.dic</entry>
<!--用户可以在这里配置自己的扩展停止词字典-->
<entry key="ext_stopwords"></entry>
<!--用户可以在这里配置远程扩展字典 -->
<!-- <entry key="remote_ext_dict">words_location</entry> -->
<!--用户可以在这里配置远程扩展停止词字典-->
<!-- <entry key="remote_ext_stopwords">words_location</entry> -->
</properties>
(4)重启elasticsearch
之后检查是否加载了文档
[Dict Loading] /root/apps/elasticsearch-6.3.1/plugins/ik/config/newword.dic
(5)重启Kibana
GET _analyze?pretty
{
"analyzer": "ik_smart",
"text": "王者荣耀"
}
{
"tokens": [
{
"token": "王者荣耀",
"start_offset": 0,
"end_offset": 4,
"type": "CN_WORD",
"position": 0
}
]
}