创建目录
# Create the host directories that will be bind-mounted into the containers.
es_home=/data/docker/es
mkdir -p \
  "${es_home}/elasticsearch/config" \
  "${es_home}/elasticsearch/data" \
  "${es_home}/elasticsearch/plugins" \
  "${es_home}/kibana/config"
mkdir -p /data/docker/es/kibana/config
elasticsearch.yml文件
vi /data/docker/es/elasticsearch/config/elasticsearch.yml
#内容如下:
# elasticsearch.yml — minimal config for a single-node Docker deployment.
http.port: 9200
# Bind HTTP to all interfaces so the container port mapping is reachable.
http.host: 0.0.0.0
# Allow cross-origin requests from any origin (e.g. browser-based ES tools).
http.cors.enabled: true
http.cors.allow-origin: "*"
# Enable X-Pack security (require username/password authentication).
xpack.security.enabled: true
# NOTE(review): enabling transport-layer SSL normally also requires a
# keystore/certificate to be configured; with discovery.type=single-node it may
# be unnecessary — confirm Elasticsearch starts cleanly with this setting.
xpack.security.transport.ssl.enabled: true
kibana.yml文件
vi /data/docker/es/kibana/config/kibana.yml
#内容如下:
# kibana.yml — Kibana server configuration, bind-mounted into the container.
server.name: kibana
# Bind address; 0.0.0.0 listens on all interfaces.
server.host: "0.0.0.0"
# URL(s) Kibana uses to reach Elasticsearch; the hostname "elasticsearch"
# resolves through the shared Docker network to that container.
elasticsearch.hosts: [ "http://elasticsearch:9200" ]
# Credentials Kibana uses to connect (set later via elasticsearch-setup-passwords).
# NOTE(review): plaintext superuser credentials in a config file — consider the
# Kibana keystore and a dedicated kibana_system user instead of elastic.
elasticsearch.username: 'elastic'
elasticsearch.password: 'java@2023#'
# Enable the monitoring UI for the containerized Elasticsearch (login page shown).
xpack.monitoring.ui.container.elasticsearch.enabled: true
# UI language
i18n.locale: "zh-CN"
docker-compose.yaml文件
# docker-compose.yaml — single-node Elasticsearch 7.9.3 + Kibana 7.9.3 stack.
version: '3.1'
services:
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.9.3
    container_name: elasticsearch
    environment:
      # Single-node cluster: skip discovery / master election.
      - "discovery.type=single-node"
      # Lock the JVM heap in RAM (pairs with the memlock ulimits below).
      - "bootstrap.memory_lock=true"
      # Cap the heap at 512 MB; raise for production workloads.
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    volumes:
      - ./elasticsearch/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml
      - ./elasticsearch/data:/usr/share/elasticsearch/data
      - ./elasticsearch/plugins:/usr/share/elasticsearch/plugins
    ulimits:
      memlock:
        soft: -1
        hard: -1
    ports:
      - 9200:9200   # HTTP REST API
      - 9300:9300   # transport protocol (node-to-node)
    restart: always
    networks:
      - bq_pro
  kibana:
    image: docker.elastic.co/kibana/kibana:7.9.3
    container_name: kibana
    depends_on:
      - elasticsearch
    volumes:
      - ./kibana/config/kibana.yml:/usr/share/kibana/config/kibana.yml
    environment:
      # Kibana 7.x reads ELASTICSEARCH_HOSTS; ELASTICSEARCH_URL was the 6.x
      # variable name and is ignored by 7.x images (fixed here). kibana.yml
      # sets the same value, so this is a belt-and-braces default.
      - "ELASTICSEARCH_HOSTS=http://elasticsearch:9200"
      - "I18N_LOCALE=zh-CN"
    ports:
      - 5601:5601
    restart: always
    networks:
      - bq_pro
# Pre-existing external network (create once with: docker network create bq_pro).
networks:
  bq_pro:
    external: true
启动
# Start the stack from the compose project directory.
# Check the cd explicitly: running docker-compose from the wrong directory
# would silently use a different (or no) compose file.
cd /data/docker/es || exit 1
docker-compose up -d
设置密码
进入到elasticsearch容器执行:
# Open an interactive shell inside the running container.
docker exec -it elasticsearch bash
# (The following commands are typed inside the container.) Go to the bin directory.
cd /usr/share/elasticsearch/bin
# Interactively set passwords for the built-in users (elastic, kibana_system, ...).
elasticsearch-setup-passwords interactive
# Answer "y" to the prompt, then enter each password in turn.
访问
elasticsearch:127.0.0.1:9200
kibana:127.0.0.1:5601
安装IK分词器
将下载的压缩文件上传到服务器,解压
# Install the IK analyzer into the plugins bind mount.
cd /data/docker/es/elasticsearch/plugins || exit 1
mkdir ik
# Extract INTO ik/: the release zip has no top-level folder, so unzipping
# directly in plugins/ would scatter the plugin files there and leave ik/
# empty — Elasticsearch would then fail to load the plugin.
unzip elasticsearch-analysis-ik-7.9.3.zip -d ik
# NOTE(review): 777 is overly permissive; 755 is usually sufficient for the
# container's elasticsearch user — confirm and tighten.
chmod -R 777 /data/docker/es/elasticsearch/plugins/ik
进入到elasticsearch容器查看插件
# Open a shell inside the running container to verify the plugin.
docker exec -it elasticsearch bash
# (Inside the container) go to the bin directory.
cd bin
# List installed plugins; the IK analyzer should appear in the output.
elasticsearch-plugin list
如果 elasticsearch-plugin list 的输出列表中包含 ik(analysis-ik),说明安装成功,然后重启 elasticsearch 验证
进入kibana 原始分词器分析
POST _analyze
{
"analyzer": "standard",
"text": ["我是中国人"]
}
IK分词器分析
POST _analyze
{
"analyzer": "ik_smart",
"text": ["我是中国人"]
}
可以看到ik分词器分词更为合理
自定义分词拓展词库
进入到ik配置目录
# Go to the IK plugin's config directory (on the host bind mount).
cd /data/docker/es/elasticsearch/plugins/ik/config
# Create your own dictionary file (one word per line, UTF-8 encoded).
vi my_dic.dic
# Register the dictionary in the IK plugin configuration.
vi IKAnalyzer.cfg.xml
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE properties SYSTEM "http://java.sun.com/dtd/properties.dtd">
<properties>
<comment>IK Analyzer 扩展配置</comment>
<!-- Local extension dictionary file(s); separate multiple .dic paths with ';' -->
<entry key="ext_dict">my_dic.dic</entry>
<!-- Local extension stop-word dictionary -->
<entry key="ext_stopwords"></entry>
<!-- Remote extension dictionary (a URL the plugin polls periodically) -->
<!-- <entry key="remote_ext_dict">words_location</entry> -->
<!-- Remote extension stop-word dictionary -->
<!-- <entry key="remote_ext_stopwords">words_location</entry> -->
</properties>
也可以将网络字典加入到配置里例如:
<entry key="remote_ext_dict">http://127.0.0.1/dic/my_dic.dic</entry>
最后重启即可