Logstash: processing nginx, nginx_error, and application logs into ES

input {

  # Read data from Kafka
  kafka {
    bootstrap_servers => ["xxx:9092"]
    client_id => "logstash_app_log_group"
    group_id => "logstash_app_log_group"
    auto_offset_reset => "latest"
    consumer_threads => 30
    decorate_events => true
    topics => ["application_log_topic"]
    codec => "json"
    type => "app"
  }
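
  # Note: with decorate_events => true, older releases of the Kafka input plugin add a
  # top-level "kafka" field (topic, partition, offset, ...) to each event, while newer
  # releases place this metadata under [@metadata][kafka] instead. The mutate filters
  # below remove the "kafka" field, which assumes the older behaviour.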

 

  kafka {
    bootstrap_servers => ["xxx:9092"]
    client_id => "logstash_nginx_log_group"
    group_id => "logstash_nginx_log_group"
    auto_offset_reset => "latest"
    consumer_threads => 10
    decorate_events => true
    topics => ["nginx_log_topic"]
    # codec => "json"
    type => "nginx"
  }

 

  kafka {
    bootstrap_servers => ["xxx:9092"]
    client_id => "logstash_nginx_error_log_group"
    group_id => "logstash_nginx_error_log_group"
    auto_offset_reset => "latest"
    consumer_threads => 5
    decorate_events => true
    topics => ["nginx_error_topic"]
    # codec => "json"
    type => "error"
  }

}
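
# consumer_threads should not exceed the partition count of the corresponding topic,
# otherwise the extra threads sit idle. The partition count can be checked with the
# stock Kafka CLI, e.g. (assuming it is available on the PATH):
#   kafka-topics.sh --bootstrap-server xxx:9092 --describe --topic application_log_topic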

# Data processing: parse the raw lines and extract the relevant fields
filter {

  if [type] == "app" {
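    # The pattern below expects bracketed application log lines; a hypothetical example
    # (field values are illustrative only):
    #   [INFO] [order-service] [2024-01-15 10:23:45.123] [http-nio-8080-exec-1] [com.example.OrderController.create] [order created]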

    grok {
      match => {
        "message" => "\[%{GREEDYDATA:logLevel}\] \[%{GREEDYDATA:systemName}\] \[%{TIMESTAMP_ISO8601:requestDate}\] \[%{GREEDYDATA:threadName}\] \[%{GREEDYDATA:classMethod}\] \[%{GREEDYDATA:printInfo}\]%{GREEDYDATA:stack}"
      }
    }
    mutate {
      remove_field => ["message"]
      remove_field => ["kafka"]
    }
    date {
      match => ["requestDate", "yyyy-MM-dd HH:mm:ss.SSS"]
      timezone => "Asia/Shanghai"
      target => "@timestamp"
    }

  } else if [type] == "nginx" {
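    # The pattern below corresponds to a custom access-log format roughly like the
    # following (an assumption; adjust it to the actual nginx log_format in use):
    #   log_format logstash '$remote_addr - - [$time_local] "$request" $status $body_bytes_sent '
    #                       '"$http_referer" "$http_user_agent" $http_x_forwarded_for '
    #                       '$request_time $upstream_response_time $hostname';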

    grok {
      # match => { "message" => "%{NGINXACCESS}" }
      match => {
        "message" => "%{IP:remote_addr} \- \- \[%{HTTPDATE:time_local}\] \"%{WORD:method} %{GREEDYDATA:request} HTTP/%{NUMBER:httpversion}\" %{NUMBER:http_status} %{NUMBER:body_bytes_sent}\s{1,}%{QS:http_referer}\s{1,}%{QS:http_user_agent} %{GREEDYDATA:http_x_forwarded_for} %{GREEDYDATA:request_time} %{GREEDYDATA:upstream_response_time} %{GREEDYDATA:host_name}"
      }
    }
    mutate {
      remove_field => ["message"]
      remove_field => ["kafka"]
    }
    date {
      match => ["time_local", "dd/MMM/yyyy:HH:mm:ss Z"]
      timezone => "Asia/Shanghai"
      target => "@timestamp"
    }

  } else if [type] == "error" {
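    # A typical nginx error_log line this pattern targets (values are illustrative):
    #   2024/01/15 10:23:45 [error] 1234#0: *56 connect() failed (111: Connection refused) while connecting to upstream, client: 10.0.0.1, server: example.com, request: "GET /api/v1/items HTTP/1.1", upstream: "http://10.0.0.5:8080/api/v1/items", host: "example.com"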

    grok {
      match => { "message" => "(?<logDate>\d{4}/\d{2}/\d{2}\s{1,}\d{2}:\d{2}:\d{2}) \[%{LOGLEVEL:logLevel}\] %{POSINT:pid}#%{NUMBER}: %{GREEDYDATA:errormessage}(?:, client: (?<remote_addr>%{IP}|%{HOSTNAME}))(?:, server: %{IPORHOST:server}?)(?:, request: %{QS:request})?(?:, upstream: (?<upstream>\"%{URI}\"|%{QS}))?(?:, host: %{QS:request_host})?(?:, referrer: \"%{URI:referrer}\")?" }
    }
    mutate {
      remove_field => ["message"]
      remove_field => ["kafka"]
    }
    date {
      match => ["logDate", "yyyy/MM/dd HH:mm:ss"]
      timezone => "Asia/Shanghai"
      target => "@timestamp"
    }
  }

}
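
# The date filters above parse the timestamp out of each log line (interpreted as
# Asia/Shanghai local time) and write it to @timestamp; Elasticsearch stores
# @timestamp in UTC, and Kibana converts it back to the browser's timezone for display.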

 

output {

  # Write to ES
  if [type] == "app" {
    elasticsearch {
      hosts => ["xxx:9200"]
      index => "app-%{+YYYYMMdd}"
      timeout => 300
    }
  } else if [type] == "nginx" {
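    # Only index nginx access events that were parsed successfully; events tagged with
    # _grokparsefailure or _dateparsefailure match no output and are therefore dropped.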

if "_grokparsefailure" not in [tags] and "_dateparsefailure" not in [tags] {

elasticsearch{

hosts => ["xxx:9200"]

index => "nginx-%{+YYYYMMdd}"

timeout => 300

}

}

} else if[type] == "error"{

elasticsearch{

hosts => ["xxx:9200"]

index => "error-nginx-%{+YYYYMMdd}"

timeout => 300

}

}

}
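
To sanity-check the pipeline, the config can be syntax-tested and started with the standard Logstash CLI, and the resulting daily indices listed via the Elasticsearch _cat API. A minimal sketch, assuming the config is saved as kafka-to-es.conf and run from the Logstash install directory (the file name and the xxx hosts are placeholders):

    bin/logstash -f kafka-to-es.conf --config.test_and_exit    # validate the config syntax only
    bin/logstash -f kafka-to-es.conf                           # run the pipeline
    curl -s 'http://xxx:9200/_cat/indices/app-*,nginx-*,error-nginx-*?v'   # list the daily indices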
