logstash 配置文件（包含两套管道配置：生产者 mysql→kafka，消费者 kafka→elasticsearch）
实现每分钟监控mysql表的number列的数据更新情况并作为生产者通过logstash输入kafka
es作为消费者通过logstash导入信息
第一套配置：生产者管道（每分钟从 mysql 读取增量数据，写入 kafka）
input {
  # Producer side: poll MySQL once per minute and emit each new row as an event.
  jdbc {
    jdbc_connection_string => "jdbc:mysql://47.111.230.222:3306/new-jibing"
    # NOTE(review): plaintext DB credentials committed in config — consider
    # environment variables or the Logstash keystore.
    jdbc_user => "sjcj"
    jdbc_password => "123456"
    jdbc_driver_library => "C:/Users/80686/Desktop/sjcj/pdi-ce-8.2.0.0-342/data-integration/lib/mysql-connector-java-5.1.5-bin.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    # Page large result sets instead of loading everything at once.
    # Bare boolean/number literals (idiomatic; the quoted strings used before
    # were coerced to the same values).
    jdbc_paging_enabled => true
    jdbc_page_size => 50000
    # Incremental sync: persist the highest "number" seen as :sql_last_value
    # and only fetch rows newer than it on the next run.
    use_column_value => true
    tracking_column => "number"
    # Explicitly numeric (this is the default, but stating it guards against
    # the column being mis-tracked as a timestamp).
    tracking_column_type => "numeric"
    statement => "SELECT * FROM shujuzhenghe WHERE number> :sql_last_value"
    # Cron syntax: run every minute.
    schedule => "* * * * *"
    # Source data is GBK-encoded.
    codec => plain { charset => "GBK" }
  }
}
# Producer output: publish each row-event to Kafka; also echo to stdout
# as JSON lines for debugging.
output {
kafka {
# Topic written here is the same one the consumer pipeline subscribes to.
topic_id => "test"
# Keep the GBK encoding on the wire so the consumer's GBK codec decodes it.
codec => plain{ charset => "GBK" }
bootstrap_servers => "localhost:9092"
}
stdout {
codec => json_lines
}
}
第二套配置：消费者管道（从 kafka 读取数据，经过滤后写入 elasticsearch）
# Consumer input: subscribe to the Kafka topic the producer pipeline writes to.
input {
kafka {
bootstrap_servers => "localhost:9092"
topics => ["test"]
# "latest": start at the end of the topic — messages produced before this
# consumer starts are skipped. Use "earliest" to replay existing messages.
auto_offset_reset => "latest"
# NOTE(review): threads beyond the topic's partition count sit idle —
# confirm "test" has at least 5 partitions.
consumer_threads => 5
# Tag events so downstream stages can distinguish this source.
type => "gb"
# Decode the GBK bytes the producer side emitted.
codec => plain{ charset => "GBK" }
}
}
filter {
# NOTE(review): %{COMBINEDAPACHELOG} expects Apache access-log lines, but the
# upstream events are MySQL rows serialized through the plain codec — this
# pattern will most likely never match and will tag every event with
# _grokparsefailure. Confirm the intended message format and replace the
# pattern (or drop this filter) accordingly.
grok {
match => {"message"=>"%{COMBINEDAPACHELOG}"}
}
}
output {
elasticsearch {
hosts => ["localhost:9200"]
index => "gaibian"
codec => plain{ charset => "GBK" }
}
stdout {
codec => json_lines
}
job.json（DataX 数据同步任务配置：csv 文件 → mysql）
源代码
{
  "job": {
    "setting": {
      "speed": {
        "channel": 1
      }
    },
    "content": [
      {
        "reader": {
          "name": "txtfilereader",
          "parameter": {
            "path": ["C://Users//80686//Desktop//python_work//jibing//jibingjianjie.csv"],
            "encoding": "gb18030",
            "column": [
              {
                "index": 0,
                "type": "string"
              },
              {
                "index": 1,
                "type": "string"
              }
            ],
            "fieldDelimiter": ",",
            "skipHeader": "false"
          }
        },
        "writer": {
          "name": "mysqlwriter",
          "parameter": {
            "writeMode": "insert",
            "username": "sjcj",
            "password": "123456",
            "column": [
              "疾病名称",
              "简介"
            ],
            "connection": [
              {
                "jdbcUrl": "jdbc:mysql://47.111.230.222:3306/jibing?useUnicode=true&characterEncoding=gb18030",
                "table": [ "简介" ]
              }
            ]
          }
        }
      }
    ]
  }
}