ELK: Multiline Log Merging, Field Parsing, and Timestamp Handling

readme

filebeat regexp

  • 19/10/31 23:59:59
    date +"%y/%m/%d %T"
    
  • 2019/10/30 16:08:17
    date +"%Y/%m/%d %T"
    
  • 2019/10/30 02:00:00.003
    date +"%Y/%m/%d %T.%3N"
    
  • 2019-10-30 10:59:44
    date +"%F %T"
    
  • 2019-10-30 15:43:56.652
    date +"%F %T.%3N"
    
  • 2019-10-30 10:59:59 133.998
    s=`date +"%F %T %6N"`; echo ${s:0:-3}.${s:((${#s}-3)):3}
    
  • 2019-11-08T16:56:55.520+0800
    date +"%F %T.%3N%z"
    
  • [2019-10-30 14:49:22]
    date +"[%F %T]"
    
  • [2019-11-03 23:59:31.001703]
    date +"[%F %T.%6N]"
    

The regex below matches all nine of the time formats listed above:

^\[?(?:\d\d){1,2}[-\/](?:0?[1-9]|1[0-2])[-\/](?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])[T ](?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)\s*(?:[0-9]+.[0-9]+)?\]?
multiline:
  pattern: '^\[?(?:\d\d){1,2}[-\/](?:0?[1-9]|1[0-2])[-\/](?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])[T ](?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)\s*(?:[0-9]+.[0-9]+)?\]?'
  negate: true
  match: after
  max_lines: 500
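
A minimal filebeat.yml input sketch showing where these multiline settings sit; the input type and log path below are placeholders, not taken from the original setup:

filebeat.inputs:
  - type: log
    enabled: true
    paths:
      - /var/log/app/*.log    # placeholder path
    # merge continuation lines (stack traces, wrapped JSON, ...) into the
    # preceding event that starts with a recognized timestamp
    multiline:
      pattern: '^\[?(?:\d\d){1,2}[-\/](?:0?[1-9]|1[0-2])[-\/](?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])[T ](?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)\s*(?:[0-9]+.[0-9]+)?\]?'
      negate: true
      match: after
      max_lines: 500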

1.1 19/10/30 23:59:59

log

19/10/30 23:59:59 INFO Client: Application report for application_1565231848008_966301 (state: RUNNING)

logstash.conf

filter {
  grok {
    match => {
      "message" => "%{CUSTOM_TIMESTAMP:timestamp}%{SPACE}%{WORD:log.level}%{SPACE}%{JAVALOGMESSAGE:message}"
    }
    pattern_definitions => {
     "CUSTOM_TIMESTAMP" => "%{YEAR}/%{MONTHNUM}/%{MONTHDAY}%{SPACE}%{HOUR}:?%{MINUTE}(?::?%{SECOND})?"
    }
    overwrite => [ "message" ]
  }
  date {
    match => ["timestamp", "yy/MM/dd HH:mm:ss"]
    target => "@timestamp"
    timezone => "Asia/Shanghai"
    remove_field => ["timestamp"]
  }
}
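
To try the filter on a single line before wiring Filebeat in, it can be wrapped in a throwaway pipeline that reads stdin and prints the parsed event; a sketch, with the filter body elided to a comment:

input {
  stdin { }
}
filter {
  # grok + date blocks from above go here
}
output {
  stdout { codec => rubydebug }
}

Running e.g. echo '19/10/30 23:59:59 INFO Client: Application report ...' | bin/logstash -f test.conf (test.conf being whatever you name the sketch) should print an event with log.level, message, and @timestamp taken from the log line's own time.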

filebeat.conf

^(?:\d\d)\/(?:0?[1-9]|1[0-2])\/(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) (?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)

1.2 2019/10/30 16:08:17

log

2019/10/30 16:08:17 [error] 22#22: *17587 FastCGI sent in stderr: "Primary script unknown" while reading response header from upstream, client: 10.21.2.202, server: localhost, request: "GET /html/public/index.php HTTP/1.0", upstream: "fastcgi://127.0.0.1:9000", host: "61.147.167.189"

logstash.conf

filter {
  if "error" in [log][file][path] {
    grok {
      match => {
        # 2019/11/03 20:10:22
        "message" => "%{CUSTOM_TIMESTAMP:timestamp}%{SPACE}\[%{WORD:log.level}\]%{GREEDYDATA:message}"
      }
      pattern_definitions => {
        "CUSTOM_TIMESTAMP" => "%{YEAR}/%{MONTHNUM}/%{MONTHDAY} %{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}?"
      }
      overwrite => [ "message" ]
    }
    date {
      match => ["timestamp", "yyyy/MM/dd HH:mm:ss"]
      target => "@timestamp"
      timezone => "Asia/Shanghai"
      remove_field => ["timestamp"]
    }
  }
}

filebeat.conf

^(?:\d\d){2}\/(?:0?[1-9]|1[0-2])\/(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) (?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)
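
The conditional if "error" in [log][file][path] above keys off the source file path, which Filebeat attaches to every event as log.file.path. A sketch of the matching input, with a placeholder path:

filebeat.inputs:
  - type: log
    paths:
      - /var/log/nginx/error.log    # placeholder; the word "error" in the path is what the Logstash conditional tests
    multiline:
      pattern: '^(?:\d\d){2}\/(?:0?[1-9]|1[0-2])\/(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) (?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)'
      negate: true
      match: after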

1.3 2019/10/30 02:00:00.003

log

2019/10/30 02:00:00.003 [D] [monitor.go:213]  {"code":-1,"message":"不用报警!","data":null}

logstash.conf

filter {
  grok {
    match => {
      "message" => '%{CUSTOM_TIMESTAMP:timestamp}%{SPACE}\[%{WORD:log.level}\]%{SPACE}%{GREEDYDATA:message}'
    }
    pattern_definitions => {
      # 2019/11/04 08:00:01.776
      "CUSTOM_TIMESTAMP" => "%{YEAR}/%{MONTHNUM}/%{MONTHDAY} %{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}?"
    }
    overwrite => [ "message" ]
  }
  date {
    match => ["timestamp", "yyyy/MM/dd HH:mm:ss.SSS"]
    target => "@timestamp"
    timezone => "Asia/Shanghai"
    remove_field => ["timestamp"]
  }
}

filebeat.conf

^(?:\d\d){2}\/(?:0?[1-9]|1[0-2])\/(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) (?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+))

1.4 2019-10-30 10:59:44

log

2019-10-30 10:59:44 [10.20.2.8][624][-][info][realtime] realtime {"type":"countIncrLaunchByDay","metaName":"205_699","day":"2019_10_30","pubkey":"bfar","sign":"35de7d15ddff5c4cf5b49ffe41708678"}200 {"result":0,"values":{"spld":304,"slots":{"1572364800":57,"1572368400":27,"1572372000":14,"1572375600":11,"1572379200":9,"1572382800":14,"1572386400":5E+1,"1572390000":43,"1572393600":56,"1572397200":49,"1572400800":73},"total":403}}

logstash.conf

filter {
  grok {
    match => {
      # 2019-11-06 11:25:01
      "message" => '%{TIMESTAMP_ISO8601:timestamp}%{SPACE}\[%{IPORHOST:req.ip}\]%{GREEDYDATA:other}\[%{LOGLEVEL:log.level}\]%{GREEDYDATA:message}'
    }
    overwrite => [ "message" ]
  }
  date {
    match => ["timestamp", "yyyy-MM-dd HH:mm:ss"]
    target => "@timestamp"
    timezone => "Asia/Shanghai"
    remove_field => ["timestamp"]
  }
  if [other] {
    mutate {
      update => { "message" => "%{other} %{message}"}
      remove_field => ["other"]
    }
  }
}
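
For the sample line above, the filter leaves roughly the following fields (a sketch of the rubydebug output; Beat metadata is omitted and the exact field layout depends on the Logstash version):

{
     "@timestamp" => 2019-10-30T02:59:44.000Z,
         "req.ip" => "10.20.2.8",
      "log.level" => "info",
        "message" => "[624][-] [realtime] realtime {\"type\":\"countIncrLaunchByDay\",...}"
}

The intermediate other field captures whatever sits between the client IP and the log level ([624][-] here) and is folded back into message by the mutate block, so nothing from the original line is dropped.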

filebeat.conf

^(?:\d\d){2}-(?:0?[1-9]|1[0-2])-(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) (?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)

1.5 2019-10-30 15:43:56.652

log

2019-10-30 15:43:56.652  INFO 20 --- [scheduling-1] c.s.a.i.repository.ImpalaRepository      : schema column size: 12

logstash.conf

filter {
   grok {
     match => {
       "message" => "%{TIMESTAMP_ISO8601:timestamp}%{SPACE}%{WORD:log.level}%{SPACE}%{JAVALOGMESSAGE:message}"
     }
     overwrite => [ "message" ]
   }
   date {
     match => ["timestamp", "ISO8601"]
     target => "@timestamp"
     timezone => "Asia/Shanghai"
     remove_field => ["timestamp"]
   }
}

filebeat.conf

^(?:\d\d){2}-(?:0?[1-9]|1[0-2])-(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) (?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)

1.6 2019-10-30 10:59:59 133.998

log

2019-10-30 10:59:59 133.998,   112.96.109.99:0,   authen/lsc/getThirdAccountBindStatus,   55101,   1121,   -1,   -1,   ,   ,   ,   app_id=694&sndaId=1993027799&key1=312&key2=315,   10.10.40.82:8004,   6.191,   0,   { "return_code": 0, "return_message": "", "data": { "mobileBindFlag": 1, "status1": 0, "status2": 0 } },   tmstamp:888888,   sigstat:0,   58DB46BFEC55944BA218D497EC4323D3

logstash.conf

filter {
  grok {
    match => {
      # 2019-11-05 14:11:58 514.508
      "message" => "%{CUSTOM_TIMESTAMP:timestamp},%{SPACE}%{IPORHOST:req.clientIP}:%{NONNEGINT},%{SPACE}%{NOTCOMMA:req.url}                                                                 ,%{SPACE}%{NUMBER:req.serviceNo},%{SPACE}%{NUMBER:req.messageNo},%{SPACE}%{GREEDYDATA:message},%{SPACE}%{GREEDYDATA:req.id}"
    }
    pattern_definitions => {
       "CUSTOM_TIMESTAMP" => "%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{SPACE}(?:[0-9]+.[0-9]+)?"
       "NOTCOMMA" => "(?:[^,]+)"
    }
    overwrite => [ "message" ]
  }
  date {
    match => ["timestamp", "yyyy-MM-dd HH:mm:ss SSS.SSS"]
    target => "@timestamp"
    timezone => "Asia/Shanghai"
    remove_field => ["timestamp"]
  }
  if [req.clientIP] {
    geoip {
      source => "req.clientIP"
      database => "/etc/logstash/geoip/GeoLite2-City.mmdb"
      add_field => ["[geoip][coordinates]", "%{[geoip][longitude]}"]
      add_field => ["[geoip][coordinates]", "%{[geoip][latitude]}"]
    }
    mutate {
      convert => ["[geoip][coordinates]", "float"]
      remove_field  => [
        "[geoip][latitude]",
        "[geoip][longitude]",
        "[geoip][continent_code]",
        "[geoip][country_code3]",
        "[geoip][dma_code]",
        "[geoip][ip]",
        "[geoip][postal_code]",
        "[geoip][region_code]",
        "[geoip][timezone]"
      ]
    }
  }
}
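
For the [geoip][coordinates] array to be usable on a Kibana map, the target index has to map it as geo_point; the add_field order above (longitude first, then latitude) matches the [lon, lat] order geo_point expects for arrays. A sketch of a legacy index template fragment, with a placeholder template name and index pattern:

PUT _template/app-logs
{
  "index_patterns": ["app-logs-*"],
  "mappings": {
    "properties": {
      "geoip": {
        "properties": {
          "coordinates": { "type": "geo_point" }
        }
      }
    }
  }
}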

filebeat.conf

^(?:\d\d){2}-(?:0?[1-9]|1[0-2])-(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) (?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)\s+(?:[0-9]+.[0-9]+)

1.7 2019-11-08T16:56:55.520+0800

log

2019-11-08T16:56:55.520+0800	INFO	beater/filebeat.go:443	Stopping filebeat

logstash.conf

filter {
   grok {
     match => {
       "message" => "%{TIMESTAMP_ISO8601:timestamp}%{SPACE}%{WORD:log.level}%{SPACE}%{GREEDYDATA:message}"
     }
     overwrite => [ "message" ]
   }
   date {
     match => ["timestamp", "ISO8601"]
     target => "@timestamp"
     timezone => "Asia/Shanghai"
     remove_field => ["timestamp"]
   }
}

filebeat.conf

^(?:\d\d){2}-(?:0?[1-9]|1[0-2])-(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])T(?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)(?:[+-]?(?:[0-9]+))

1.8 [2019-10-30 14:49:22]

log

[2019-10-30 14:49:22] local.DEBUG: app.requests {"ips":["10.21.2.202"],"method":"POST","time":0.022209882736206055,"url":"http://adtrack.example.cn/api/admin/apps/list","request":[],"content":"","response":"{\"code\":0,\"data\":[{\"id\":100,\"app_name\":\"\\u6012\\u7130\\u4e09\\u56fd\\u6740\",\"app_key\":\"190923093407\",\"created_at\":null},{\"id\":12213934,\"app_name\":\"test_\\u6012\\u7130\",\"app_key\":\"191024903641\",\"created_at\":\"2019-10-24 04:12:44\"},{\"id\":13567891,\"app_name\":\"test_\\u738b\\u8005\\u5feb\\u8dd1\",\"app_key\":\"19102398..."} 

logstash.conf

filter {
  grok {
    match => {
      # [2019-11-04 17:29:40]
      "message" => "\[%{TIMESTAMP_ISO8601:timestamp}\]%{SPACE}%{CUSTOM_LOGLEVEL:log.level}%{GREEDYDATA:message}"
    }
    pattern_definitions => {
      "CUSTOM_LOGLEVEL" => "%{WORD}.%{WORD}"
    }
    overwrite => [ "message" ]
  }
  date {
    match => ["timestamp", "ISO8601"]
    target => "@timestamp"
    timezone => "Asia/Shanghai"
    remove_field => ["timestamp"]
  }
}

filebeat.conf

^\[(?:\d\d){2}-(?:0?[1-9]|1[0-2])-(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) (?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)\]

1.9 [2019-11-03 23:59:31.001703]

log

[2019-10-30 10:47:52.422006] production.DEBUG: [02:47:52.284519][10.30.40.18]/pay/notify/1012/100 测试 {"url":"https://topserver.example.com/topserver/pay","query":{"order_id":"191030036629146872","app_order_id":"1926603753545269248","order_amount":"30.00","product_id":null,"role_id":1184833372866744320,"extend_order_id":"200601201910301047439196646","success_time":"2019-10-30 10:47:52","extend_account":"test","debug":0,"channel":1012,"hash":"867b8a16007959b32db5d9ca4e9aca24b36c7e59563384e89bc8286a24c32241ebaab4cebb1303b9a80889a2ef317747ef49fd350200ebfd3ef83e486126118b"},"response":"{\"code\":0,\"message\":\"success\"}","time":0.10932707786560059} []

logstash.conf

filter {
  grok {
    match => {
      # [2019-11-03 23:59:31.001703]
      "message" => "\[%{TIMESTAMP_ISO8601:timestamp}\]%{SPACE}%{CUSTOM_LOGLEVEL:log.level}%{GREEDYDATA:message}"
    }
    pattern_definitions => {
      "CUSTOM_LOGLEVEL" => "%{WORD}.%{WORD}"
    }
    overwrite => [ "message" ]
  }
  date {
    match => ["timestamp", "yyyy-MM-dd HH:mm:ss.SSSSSS"]
    target => "@timestamp"
    timezone => "Asia/Shanghai"
    remove_field => ["timestamp"]
  }
}

filebeat.conf

^\[(?:\d\d){2}-(?:0?[1-9]|1[0-2])-(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) (?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+))\]