采集多个日志
前面介绍了如何收集单个Nginx日志;如果有采集多个日志的需求,可以参考下面的配置。
Filebeat采集多个日志配置
# Filebeat: tail two log files and tag each with a distinguishing "type"
# field so the downstream Logstash pipeline can route them separately.
filebeat.inputs:
  # Nginx access log, written in JSON format (parsed later by Logstash).
  - type: log
    tail_files: true
    backoff: "1s"
    paths:
      - /usr/local/nginx/logs/access.json.log
    fields:
      type: access
    # Put the custom "type" key at the event root instead of under "fields".
    fields_under_root: true
  # Linux auth log (plain text).
  - type: log
    tail_files: true
    backoff: "1s"
    paths:
      - /var/log/secure
    fields:
      type: secure
    fields_under_root: true

# Ship all events to Logstash on port 5044.
output.logstash:
  hosts: ["xxx.xxx.xxx.xx:5044"]
Logstash如何区分这两种日志?方法是在Filebeat中加入一个字段用来区别,Logstash再使用该字段来区分并分别处理。
Logstash通过type字段进行判断
# Receive events from Filebeat and route them to per-type Elasticsearch indices.
input {
  beats {
    host => "0.0.0.0"
    port => 5044
  }
}

filter {
  # Only the Nginx access log is JSON; parse it and drop bookkeeping fields.
  if [type] == "access" {
    json {
      source => "message"
      remove_field => ["message","@version","path","beat","input","log","offset","prospector","source","tags"]
    }
  }
}

output {
  # The "type" field added by Filebeat selects the target index.
  if [type] == "access" {
    elasticsearch {
      hosts => ["http://xxx.xxx.xxx.xx:9200"]
      index => "access-%{+YYYY.MM.dd}"
    }
  } else if [type] == "secure" {
    elasticsearch {
      hosts => ["http://xxx.xxx.xxx.xx:9200"]
      index => "secure-%{+YYYY.MM.dd}"
    }
  }
}
下面是Logstash从Kafka消费日志并输出到标准输出的示例配置
# Consume JSON events from a Kafka topic, grok-parse the HTTP access line,
# and print the decoded events for debugging.
input {
  kafka {
    bootstrap_servers => "xxx.xxx.xx.xx:9092"
    topics => ["unexpectedly"]
    group_id => "unexpectedly"
    consumer_threads => 1
    # Attach Kafka topic/partition/offset metadata to each event.
    decorate_events => true
    # Messages are produced as JSON; decode them on ingest.
    codec => "json"
    # NOTE: auto_offset_reset takes the strings "earliest"/"latest", not a boolean.
    #auto_offset_reset => "earliest"
  }
}

filter {
  grok {
    # Use the combined access-log pattern from logstash-patterns-core.
    # The previous %{DATA:HTTPD_COMBINEDLOG} did not parse anything: DATA is a
    # generic non-greedy pattern, so it only copied text into a field that
    # happened to be named HTTPD_COMBINEDLOG.
    match => {
      "message" => "%{HTTPD_COMBINEDLOG}"
    }
  }
}

output {
  stdout { codec => rubydebug }
}