1、ES创建删除索引策略
PUT _ilm/policy/log_delete_policy
{
"policy": {
"phases": {
"delete": {
"min_age": "30d",
"actions": {
"delete": {}
}
}
}
}
}
2、创建索引模板并关联删除策略
PUT _template/logs_template
{
"index_patterns": ["hadoop-hdfs-log-*","cloudera-scm-*","hadoop-yarn-log-*","hbase-log-*","hive-log-*","nginx-log-*","spark-log-*","tomcat-log-*","zookeeper-log-*",
"hadoop-mapreduce-log-*","flume-ng-log-*","sorl-log-*","presto-log-*","yarn-log-*","elasticsearch-log-*","kafka-log-*","flink-log-*"],
"settings":{
"number_of_shards": 2,
"number_of_replicas": 1,
"index.lifecycle.name": "log_delete_policy"
},
"mappings":{
"properties":{
"message":{
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
}
}
}
}
3、filebeat配置采集任务
filebeat目录/opt/applications/filebeat-7.13.2-linux-x86_64
1>配置filebeat配置文件/opt/applications/filebeat-7.13.2-linux-x86_64/filebeat.yml
示例:
filebeat.inputs:                                      # 输入
  - type: log
    paths:
      - /opt/logs/hadoop-hdfs/*                       # filebeat暂不支持递归目录
    fields:
      source: hadoop-hdfs
  ...
setup.template.name: "logs_template"                  # 数据入ES创建索引的索引模板
setup.template.pattern: "%{fields.source}-log-*"      # 匹配索引模板的索引名称
output.elasticsearch:                                 # 输出
  username: XXX                                       # 连接ES的用户名
  password: "XXXX"                                    # 连接ES的密码
  hosts: ["X.X.X.X:9200"]                             # 连接ES的主机列表
  indices:
    - index: "hadoop-hdfs-log-%{+yyyy.MM.dd}"         # 日志输出的索引名称,以天为量度
      when.equals:
        fields:
          source: "hadoop-hdfs"                       # 日志来源和inputs的source相匹配
    - index: ...
2>配置完filebeat的配置文件之后启动filebeat
/opt/applications/filebeat-7.13.2-linux-x86_64/filebeat -e -c filebeat.yml > filebeat.log 2>&1 &
需要把配置好的filebeat-7.13.2-linux-x86_64发送到每台服务器,且每台都需要单独启动