input {
  # Kafka source: consumes messages from the "test" topic.
  kafka {
    bootstrap_servers => "****:9092"            # Kafka broker host:port
    topics => ["test"]
    add_field => { "myid" => "journal" }        # routing tag used by filter/output below
  }
  # JDBC source: periodically pulls rows from the Oracle IBS_RETCODE table.
  jdbc {
    jdbc_connection_string => "jdbc:oracle:thin:@****:tissitdb2"  # Oracle connection string
    jdbc_user => "***"                          # database user
    jdbc_password => "*****"                    # database password
    jdbc_driver_library => "/home/elk/ojdbc6.jar"  # path to the Oracle JDBC driver jar (important)
    # The "Java::" prefix is the documented workaround for loading the Oracle driver under JRuby.
    jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
    statement => "select * from IBS_RETCODE"
    schedule => "* * */1 * *"                   # cron fields: minute hour day month weekday
    type => "jdbc-error-mapping"
    add_field => { "myid" => "jdbc-error-mapping" }
    record_last_run => true                     # persist the last run marker between schedules
    use_column_value => false                   # track by last run timestamp, not a column value
    tracking_column => "update_time"
  }
}
filter {
  # Parse Kafka-sourced journal lines of the form
  # [##]@TIME{...}@UUID{...}@TYPE{...}@SERI{...}@DESC{...}@DATA{...}@EXPT{...}@BSID{...}[$$]
  if [myid] == "journal" {
    grok {
      match => {
        "message" => "^\[[#][#]\][@][T][I][M][E]\{(?<timestamp>[0-9]{4}[-][0-9]{2}[-][0-9]{2}[ ][0-9]{2}[:][0-9]{2}[:][0-9]{2}[\.][0-9]{3})\}[@][U][U][I][D]\{(?<uuid>[0-9|a-z|:| |-]*)\}[@][T][Y][P][E]\{(?<type>[\s\S]*)\}[@][S][E][R][I]\{(?<seri>[0-9|a-z|-]*)\}[@][D][E][S][C]\{(?<desc>[\s\S]*)\}[@][D][A][T][A]\{(?<data>[\s\S]*)\}[@][E][X][P][T]\{(?<expt>[\s\S]*)\}[@][B][S][I][D]\{(?<bsid>[\s\S]*)\}\[\$\$\]$"
      }
    }
  }
  # Use the parsed event time as @timestamp; the explicit timezone compensates
  # for the 8-hour offset otherwise seen in Elasticsearch/Kibana.
  # NOTE(review): jdbc events have no "timestamp" field, so this date filter will
  # tag them with _dateparsefailure (harmless, and "tags" is removed below).
  date {
    match => ["timestamp", "yyyy-MM-dd HH:mm:ss,SSS", "yyyy-MM-dd HH:mm:ss.SSS"]
    target => "@timestamp"
    timezone => "+08:00"
  }
  # Drop fields that are not needed in the index.
  mutate {
    remove_field => ["host", "@version", "ecs", "input", "_type", "agent", "data", "log", "type", "seri", "tags"]
  }
}
output {
  # Kafka-sourced journal events go to a daily "name_one" index.
  if [myid] == "journal" {
    elasticsearch {
      hosts => "127.0.0.1:9200"
      # Fixed: original had a stray brace ("name_one_%{{+yyyy_MM_dd}"), which is
      # not a valid sprintf date reference and would index literally.
      index => "name_one_%{+yyyy_MM_dd}"
    }
  }
  # JDBC-sourced rows go to a daily "name_two" index.
  if [myid] == "jdbc-error-mapping" {
    elasticsearch {
      hosts => "127.0.0.1:9200"
      index => "name_two_%{+yyyy_MM_dd}"
    }
  }
  # Echo every event to stdout as JSON lines (useful for debugging the pipeline).
  stdout {
    codec => json_lines
  }
}
# Logstash configuration with two inputs: Kafka and a JDBC (Oracle) database.
# (Original article published 2023-06-11 21:52:11.)