- Logstash Grok script: parse and filter logs, then write them to Elasticsearch
input {
  kafka {
    type => "normal_log_type"
    # bootstrap_servers takes a comma-separated string, not an array
    bootstrap_servers => "10.2.11.92:9092"
    topics => ["log_topic"]
    #group_id => "filebeat-logstash"
    #client_id => "logstashnode1"
    consumer_threads => 1
    codec => json {
      charset => "UTF-8"
    }
    decorate_events => false
  }
}
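# A hypothetical sample of the JSON event this pipeline expects from Kafka
# (field names are assumed from the filter section below, not confirmed by
# the source):
#   { "level": "INFO",
#     "className": "com.ggport.service.UserService",
#     "message": "~operateType:login~userId:1001" }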
filter {
  # Drop events whose level matches neither ERROR nor INFO
  # (!~ is the negated regex match; =~ is the positive one)
  if [level] !~ "(ERROR|INFO)" {
    drop {}
  }
  grok {
    # Match class names that start with com.ggport (dot escaped to match literally)
    match => {
      "className" => "(?<ggport>^com\.ggport)"
    }
  }
  # Remove the capture field created by the match above
  mutate {
    remove_field => ["ggport"]
  }
  # grok tags events it cannot match with _grokparsefailure; drop those
  if "_grokparsefailure" in [tags] {
    drop {}
  }
  grok {
    # Flag messages that start with "~" (the audit-log marker)
    match => {
      "message" => "(?<auditType>^~)"
    }
  }
  # Clear the tags field so normal logs do not keep the
  # _grokparsefailure tag added when the match above fails
  mutate {
    remove_field => ["tags"]
  }
  # If the marker matched, split the message into key/value pairs
  if [auditType] {
    kv {
      prefix => ""
      source => "message"
      field_split => "~"
      value_split => ":"
    }
  }
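  # A sketch of the kv result on assumed sample data (not from the source):
  # "~operateType:login~userId:1001" yields operateType => "login" and
  # userId => "1001"; the empty token before the first "~" has no ":" and
  # is ignored by kv.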
  # Remove the marker field created by the grok match
  mutate {
    remove_field => ["auditType"]
  }
}
output {
  # Audit-log index: events the kv filter gave an operateType field
  if [operateType] {
    elasticsearch {
      hosts => ["10.26.11.23:9200"]
      index => "audit_log"
      # document_type is deprecated in ES 6 and removed in ES 7+
      document_type => "audit_log"
    }
  }
  # Normal-log index: everything else
  if ![operateType] {
    elasticsearch {
      hosts => ["10.26.11.22:9200"]
      index => "normal_log"
      document_type => "normal_log"
    }
  }
  stdout {
    codec => rubydebug  # also print each event to the console for debugging
  }
}
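To validate the pipeline before starting it, Logstash can parse the config and exit; a quick sanity check, assuming a standard install (the file name here is hypothetical):

bin/logstash -f log_pipeline.conf --config.test_and_exit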