logstash配置
logstash消费kafka日志,过滤后发送到elasticsearch存储
日志包括:Java服务日志、redis日志、consul日志、配置中心apollo日志
logstash安装参考:https://blog.csdn.net/huchao_lingo/article/details/103495994
添加定制正则
进入logstash安装目录,创建正则目录patterns
cd /opt/server/logstash
mkdir patterns
grok配置
grok的编写配置虽然不难,但是很容易出错,可以使用grok debugger来测试。
https://grokdebug.herokuapp.com/
添加redis日志格式匹配正则
[root@logstash patterns]# vim redis
# Redis server log timestamp, e.g. "04 Nov 10:00:00.000" — note: no year component.
# (Fixed: was defined as "EDISTIMESTAMP" but referenced below as REDISTIMESTAMP,
# so the pattern could never resolve.)
REDISTIMESTAMP %{MONTHDAY} %{MONTH} %{TIME}
# Full redis log line: "<pid>:<role-char> <timestamp> <level-symbol> <message>"
REDISLOG %{POSINT:pid}\:%{WORD:role} %{REDISTIMESTAMP:timestamp} %{DATA:loglevel} %{GREEDYDATA:msg}
添加apollo日志格式匹配正则
[root@logstash patterns]# vim apollo
# Spring-Boot-style timestamp, e.g. "2020-11-04 10:00:00.123".
# The literal "20" prefix plus %{YEAR} is the standard grok idiom (cf. TOMCAT_DATESTAMP)
# for anchoring on 21st-century dates.
JAVA_DATESTAMP 20%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}(?::?%{SECOND})
# Fully-qualified Java class name, e.g. "com.example.Foo$Bar".
JAVACLASS (?:[a-zA-Z$_][a-zA-Z$_0-9]*\.)*[a-zA-Z$_][a-zA-Z$_0-9]*
JAVALOGMESSAGE (.*)
JAVATHREAD (.*)
# Apollo (Spring Boot) log line: "<ts> <LEVEL> <pid/line> --- [<thread>] <class> <msg>"
# NOTE(review): %{NUMBER:line} presumably captures the process id column — confirm against a sample line.
APOLLO_LOG %{JAVA_DATESTAMP:timestamp}(\s+)%{LOGLEVEL:level} %{NUMBER:line}(\s+)---(\s+)\[%{JAVATHREAD:thread}\](\s+)%{JAVACLASS:class}(\s+)%{JAVALOGMESSAGE:msg}
添加consul日志格式匹配正则
[root@logstash patterns]# vim consul
# Consul log timestamp, e.g. "2020/11/04 10:00:00".
CONSUL_DATE %{YEAR}/%{MONTHNUM}/%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND}
# Full consul log line: "<datetime> [LEVEL] <message>"
CONSUL_LOG %{CONSUL_DATE:datetime} \[%{LOGLEVEL:level}\] %{GREEDYDATA:msg}
修改logstash配置文件log.conf
input {
  # Consume all shipped logs from the single "logs" Kafka topic.
  kafka {
    topics            => ["logs"]
    bootstrap_servers => ["172.16.10.213:9092,172.16.10.214:9092,172.16.10.218:9092"]
    group_id          => "logstash"
    client_id         => "logstash"
    # With no committed offset, start from the newest messages.
    auto_offset_reset => "latest"
    consumer_threads  => 10
    # Attach Kafka topic/partition/offset metadata to each event.
    decorate_events   => true
    type              => "log_type"
    # Events are shipped as UTF-8 JSON documents.
    codec             => json {charset => "UTF-8"}
  }
}
filter {
  # Route each event by the "fields.log-type" value set by the shipper
  # (NOTE(review): presumably filebeat's `fields:` config — events without
  # this field pass through unfiltered).
  if [fields][log-type] == "service" {
    # Java service logs arrive as JSON strings: parse into "json" and drop the raw text.
    json {
      source => "message"
      target => "json"
      remove_field => ["message"]
    }
  }
  else if [fields][log-type] == "consul" {
    grok {
      patterns_dir => "/opt/server/logstash/patterns"
      match => { "message" => "%{CONSUL_LOG}" }
    }
    # Use the parsed consul time as the event @timestamp.
    date {
      timezone => "Asia/Shanghai"
      match => ["datetime","yyyy/MM/dd HH:mm:ss"]
    }
  }
  else if [fields][log-type] == "redis" {
    grok {
      patterns_dir => "/opt/server/logstash/patterns"
      match => { "message" => "%{REDISLOG}" }
    }
    # Expand redis's single-character level/role codes into readable names.
    # gsub applies the field/pattern/replacement triples in order.
    mutate {
      gsub => [
        "loglevel", "\.", "debug",
        "loglevel", "\-", "verbose",
        "loglevel", "\*", "notice",
        "loglevel", "\#", "warning",
        "role","X","sentinel",
        "role","C","RDB/AOF writing child",
        "role","S","slave",
        "role","M","master"
      ]
    }
    date {
      timezone => "Asia/Shanghai"
      # REDISTIMESTAMP carries no year ("dd MMM HH:mm:ss.SSS").
      # A second format "yyyy/MM/dd HH:mm:ss:SSS" was listed here but could
      # never match that capture, so it has been removed.
      match => ["timestamp","dd MMM HH:mm:ss.SSS"]
    }
  }
  else if [fields][log-type] == "apollo" {
    grok {
      patterns_dir => "/opt/server/logstash/patterns"
      match => { "message" => "%{APOLLO_LOG}" }
    }
    date {
      timezone => "Asia/Shanghai"
      match => ["timestamp","yyyy-MM-dd HH:mm:ss.SSS"]
    }
  }
}
output {
  # All four log types target the same Elasticsearch cluster, and each index
  # name equals the log-type value itself — so a single output with a sprintf
  # index replaces four copy-pasted blocks that differed only in the index name.
  # Events with any other (or missing) log-type are dropped, as before.
  if [fields][log-type] in ["service", "consul", "redis", "apollo"] {
    elasticsearch {
      hosts => ["http://172.16.10.219:9200","http://172.16.10.220:9200","http://172.16.10.221:9200"]
      index => "%{[fields][log-type]}"
    }
  }
}