Using Logstash Filters

input {
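      # Tail the syslog file from the beginning; the multiline codec folds lines that
      # do NOT start with the custom MESSAGE pattern into the previous event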
      file{
          path =>  "/XXX/syslog.txt"
          start_position => "beginning"
          codec => multiline{
              patterns_dir => [ "/XX/logstash-1.5.3/patterns" ]
              pattern =>  "^%{MESSAGE}"
              negate =>  true
              what =>  "previous"
          }
      }
}
filter{
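     # Split the CEF-style line on "|" and copy the header positions we need into named fields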
     mutate{
      split => [ "message" , "|" ]
         add_field =>   {
             "tmp"  =>  "%{[message][0]}"
         }
         add_field =>   {
             "DeviceProduct"  =>  "%{[message][2]}"
         }
         add_field =>   {
             "DeviceVersion"  =>  "%{[message][3]}"
         }
         add_field =>   {
             "Signature ID"  =>  "%{[message][4]}"
         }
         add_field =>   {
             "Name"  =>  "%{[message][5]}"
         }
     }
 
     mutate{
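      # The first header segment ("tmp") is ":"-separated; keep its 2nd and 3rd pieces,
      # then drop the temporary field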
      split => [ "tmp" , ":" ]
         add_field =>   {
             "tmp1"  =>  "%{[tmp][1]}"
         }
         add_field =>   {
             "Version"  =>  "%{[tmp][2]}"
         }
         remove_field => [  "tmp"  ]
     }
 
     grok{
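        # Extract the event type from tmp1 using the custom TYPE pattern defined in patterns_dir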
        patterns_dir => [ "/XXX/logstash-1.5.3/patterns" ]
        match => { "tmp1"  =>  "%{TYPE:type}" }
        remove_field => [  "tmp1" ]
     }
 
     kv{
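        # Parse the key=value pairs of the extension part; only the listed keys become fields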
        include_keys => [ "eventId" , "msg" , "end" , "mrt" , "modelConfidence" , "severity" , "relevance" , "assetCriticality" , "priority" , "art" , "rt" , "cs1" , "cs2" , "cs3" , "locality" , "cs2Label" , "cs3Label" , "cs4Label" , "flexString1Label" , "ahost" , "agt" , "av" , "atz" , "aid" , "at" , "dvc" , "deviceZoneID" , "deviceZoneURI" , "dtz" , "eventAnnotationStageUpdateTime" , "eventAnnotationModificationTime" , "eventAnnotationAuditTrail" , "eventAnnotationVersion" , "eventAnnotationFlags" , "eventAnnotationEndTime" , "eventAnnotationManagerReceiptTime" , "_cefVer" , "ad.arcSightEventPath" ]
     }
     mutate{
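      # ad.arcSightEventPath is comma-separated; keep only its first element,
      # then drop the raw intermediate fields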
      split => [ "ad.arcSightEventPath" , "," ]
         add_field =>   {
             "arcSightEventPath"  =>  "%{[ad.arcSightEventPath][0]}"
         }
         remove_field => [  "ad.arcSightEventPath"  ]
         remove_field => [  "message"  ]
     }
 
}
output{
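     # Send each event to the "rawlog" Kafka topic as JSON, and also print it with rubydebug for debugging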
     kafka{
         topic_id =>  "rawlog"
         batch_num_messages =>  20
         broker_list =>  "10.3.162.193:39192,10.3.162.194:39192,10.3.162.195:39192"
         codec =>  "json"
     }
     stdout{
        codec => rubydebug
     }
}

input: connects to the data source

filter: filters and transforms the incoming events

output: where the processed events are sent

The most important part is the filter stage. Our current requirement is to extract key-value pairs from the log strings, which is done in three steps (a minimal sketch combining them follows this list):

1. mutate's split breaks the text into pieces on a delimiter.

2. grok uses regular expressions to cut the wanted substrings out of a string.

3. kv extracts all of the key=value pairs.
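
A minimal sketch of how the three steps fit together. The sample line "app:1.0|login|user=alice result=ok" and the field names header, app, ver, and body are made-up placeholders for illustration, not part of the real log format:

filter {
    mutate {
        # "app:1.0|login|user=alice result=ok" -> ["app:1.0", "login", "user=alice result=ok"]
        split     => [ "message" , "|" ]
        add_field => {
            "header" => "%{[message][0]}"
            "body"   => "%{[message][2]}"
        }
    }
    grok {
        # regex-based extraction: "app:1.0" -> app = "app", ver = "1.0"
        match => { "header" => "%{WORD:app}:%{NUMBER:ver}" }
    }
    kv {
        # space-separated key=value pairs in "body" -> fields user = "alice", result = "ok"
        source => "body"
    }
}

After this filter runs, the event carries app, ver, user, and result as separate fields; the full configuration above applies the same pattern to the CEF-formatted syslog.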

Reposted from: http://www.cnblogs.com/qq27271609/p/4762562.html
