# Sample Logstash configuration for a simple
# JDBC (plus stdin for testing) -> Logstash -> Elasticsearch pipeline.
input {
# Console/pipe input, kept alongside the JDBC input for ad-hoc testing.
stdin {
}
# Pull rows from MySQL on a schedule (cron "* * * * *" = every minute),
# loading the Connector/J 5.1 driver from a local jar.
# NOTE(review): connection string, user, and password are placeholders —
# fill in real values before running.
jdbc {
jdbc_connection_string => "xxxxxxxx"
jdbc_user => "xxxxxx"
jdbc_password => "xxxxxx"
jdbc_driver_library => "G:\datajdbc\mysql-connector-java-5.1.46.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
# Fetch in pages of 50000 rows instead of streaming the whole result set.
jdbc_paging_enabled => "true"
jdbc_page_size => "50000"
# The SQL statement lives in an external file rather than inline.
statement_filepath => "G:\datajdbc\mysql.sql"
schedule => "* * * * *"
}
}
filter {
# Collapse each parent row plus its repeated child rows (from the SQL join)
# into one event carrying a nested `subInfos` array, keyed by parentid.
# NOTE(review): per the aggregate plugin docs, push_previous_map_as_event
# requires a single pipeline worker (pipeline.workers: 1) and input rows
# sorted by the task_id field — confirm mysql.sql orders by parentid.
aggregate {
task_id => "%{parentid}"
code => "
map['parentid'] = event.get('parentid')
map['uuid'] = event.get('uuid')
map['desc1'] = event.get('desc1')
map['desc2'] = event.get('desc2')
# ... copy any further parent-level fields into the map here ...
map['subInfos'] ||= []
map['subInfos'] << {
'subinfoid' => event.get('subinfoid'),
'parentid' => event.get('parentid'),
# ... add any further child-level fields to the hash here ...
}
# Drop the per-row event; only the pushed map becomes an output event.
event.cancel()
"
# Emit the accumulated map as an event when a new parentid arrives.
push_previous_map_as_event => true
# Flush a pending map after 3 seconds of inactivity (e.g. the last group).
timeout => 3
}
# Strip Logstash bookkeeping fields before indexing.
mutate {
remove_field => ["@timestamp","@version"]
}
}
output {
# Index each aggregated parent document into Elasticsearch. Using parentid
# as the document id makes scheduled re-runs overwrite rather than duplicate.
elasticsearch {
action => "index"
document_id => "%{parentid}"
# NOTE(review): document_type is deprecated in Elasticsearch 6.x and removed
# in 7.x — acceptable for the 6.7.1 install implied by the template path,
# but drop it on upgrade.
document_type => "test"
hosts => ["127.0.0.1:9200"]
index => "test_index"
# Install the local template file (IK-analyzer mapping, per its name) under
# the given template name, replacing any existing template on startup.
manage_template => true
template_name => "test_template"
template_overwrite => true
template => "G:\elastic\logstash-6.7.1\template\logstash-ik.json"
}
# Also echo each event to the console as one JSON object per line (debugging).
stdout {
codec => json_lines
}
}