# ===== 同步 MySQL 到 ES (sync MySQL to Elasticsearch) =====
input {
  jdbc {
    jdbc_connection_string => "jdbc:mysql://127.0.0.1:3306/db"
    jdbc_user => "root"
    jdbc_password => "root"
    jdbc_driver_library => "/usr/local/service/logstash/extended-files/mysql-connector-java-8.0.18.jar"
    # Connector/J 8.x (the jar configured above) registers the driver as
    # com.mysql.cj.jdbc.Driver; "com.mysql.jdbc.Driver" is the legacy 5.x
    # name and only works through a deprecation shim, if at all.
    jdbc_driver_class => "com.mysql.cj.jdbc.Driver"
    jdbc_paging_enabled => "true"
    jdbc_page_size => "256"
    # Incremental pull: :sql_last_value is the last tracked id, persisted in
    # last_run_metadata_path across restarts.
    # NOTE(review): the inner "limit 1000" combines oddly with jdbc paging,
    # which wraps the statement with its own LIMIT/OFFSET - confirm intent.
    statement => "select id,create_time from user where id > :sql_last_value order by id limit 1000"
    use_column_value => true
    tracking_column => "id"
    # 6-field rufus-scheduler cron: second minute hour day month weekday.
    # "*/5 * * * * *" therefore runs every 5 SECONDS. (The previous comment
    # claimed minute/hour/day/month/year and "once per minute" - both wrong.)
    schedule => "*/5 * * * * *"
    last_run_metadata_path => "/usr/local/service/logstash/temp/jdbc-sql_last_value.yml"
    type => "jdbc"
    # Keep the persisted sql_last_value between pipeline restarts.
    clean_run => false
  }
}
filter {
  # The jdbc input yields create_time as a Logstash Timestamp, so it can be
  # copied straight into @timestamp.
  ruby {
    code => "event.set('@timestamp', event.get('create_time'))"
  }
  # Fallback for events where create_time arrives as a plain string.
  # MySQL DATETIME renders as "yyyy-MM-dd HH:mm:ss"; the previous pattern
  # ("MMM dd yyyy HH:mm:ss.SSS") could never match it and only produced
  # _dateparsefailure tags.
  date {
    match => [ "create_time", "yyyy-MM-dd HH:mm:ss", "ISO8601" ]
  }
}
output {
  elasticsearch {
    # Target cluster and credentials.
    hosts => ["http://127.0.0.1:9200"]
    user => "elastic"
    password => "elastic"
    index => "user_index"
    # Reuse the MySQL primary key as the ES _id so repeated sync runs
    # overwrite the same document instead of creating duplicates.
    document_id => "%{id}"
  }
}
# ===== 同步 ES 到 ES (sync Elasticsearch to Elasticsearch) =====
input {
  stdin {
  }
  elasticsearch {
    # Must equal the value tested in the output conditional
    # (if [type] == "customer_data_py"); with the previous placeholder text
    # no event would ever be routed to the elasticsearch output.
    type => "customer_data_py"
    hosts => ["http://es输入数据ip:es输入数据端口"]
    user => "用户名"
    index => "es上的索引"
    password => "密码"
    # When true, the document metadata (index, type, id) is copied into
    # [@metadata] so the output can preserve it on the target cluster.
    docinfo => true
  }
}
output {
  # Route only events produced by the elasticsearch input above.
  if [type] == "customer_data_py" {
    elasticsearch {
      hosts => ["http://es目标数据ip:es目标数据端口"]
      # Values must be quoted strings; the previous bare words
      # (user => 用户名) are a Logstash config syntax error.
      user => "用户名"
      password => "密码"
      # %{[@metadata][_index]}: write into the same index name as on the
      # source cluster (if the input index is "*", all indices migrate
      # under their original names).
      index => "%{[@metadata][_index]}"
      # Preserve the source mapping type. NOTE: document_type is deprecated
      # and ignored/removed on ES 7+; kept for older target clusters.
      document_type => "%{[@metadata][_type]}"
      # Preserve the source document id so the migration is idempotent.
      document_id => "%{[@metadata][_id]}"
    }
  }
  # Echo every event as JSON lines for debugging.
  stdout {
    codec => json_lines
  }
}
# ===== 同步 ES 到 MySQL (sync Elasticsearch to MySQL) =====
input {
  elasticsearch {
    # Source cluster, index and credentials to read documents from.
    hosts => ["http://127.0.0.1:9200"]
    index => "user_index"
    user => "elastic"
    password => "elastic"
  }
}
filter {
# NOTE(review): the elasticsearch input normally places document fields at
# the top level of the event, not under a "doc" field - confirm that
# something upstream actually produces [doc] before relying on these two
# filters (as written they would no-op and tag _dateparsefailure).
json{
source => "doc"
}
# Parse [doc][create_time] into the top-level [create_time] field;
# accepts MySQL-style "yyyy-MM-dd HH:mm:ss" or ISO8601 strings.
date {
match => ["[doc][create_time]","yyyy-MM-dd HH:mm:ss", "ISO8601"]
target => "[create_time]"
}
}
output {
# Writes each event into MySQL via a jdbc output plugin.
# NOTE(review): the option names below (jdbc_connection_string, jdbc_user,
# jdbc_password, jdbc_driver_library) are the jdbc *input* plugin's names;
# the common community logstash-output-jdbc plugin expects
# connection_string, username, password and driver_jar_path instead, and no
# driver class is configured - verify against the installed plugin version.
jdbc {
jdbc_connection_string => "jdbc:mysql://127.0.0.1:3306/db"
jdbc_user => "root"
jdbc_password => "root"
jdbc_driver_library => "/usr/local/service/logstash/extended-files/mysql-connector-java-8.0.18.jar"
# The event field references fill the three "?" placeholders in order.
# NOTE(review): [doc][createTime] does not match the filter section's
# [doc][create_time] (snake_case) - one of the two is likely wrong.
statement => ["INSERT INTO sys_user (id, username, create_time) VALUES (?,?,?)","[doc][id]","[doc][username]","[doc][createTime]" ]
}
}