Customizing the mapping when Logstash imports data into Elasticsearch
Setup:
Logstash: Windows 7, version 6.4.0
Elasticsearch: Docker on Windows 7, version 6.4.0
To install the ik analyzer plugin for Elasticsearch, click here
My first thought was also to add a template setting to the output block of logstash.conf, pointing it at a hand-written mapping.json, but after trying n times the default mapping still could not be overridden.
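For reference, that attempt looked roughly like the sketch below; template, template_name and template_overwrite are options of the logstash-output-elasticsearch plugin, and the mapping.json path is only a placeholder:

output {
  elasticsearch {
    hosts => "192.168.99.100:9200"
    index => "my_index"
    # point the output at a hand-written template file and try to overwrite the default
    template => "D:\work\mapping.json"
    template_name => "my_index"
    template_overwrite => true
  }
}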
In the end I solved it like this:
1. First create the index and its mapping on Elasticsearch:
PUT /my_index
{
  "settings": {
    "index.refresh_interval": "5s"
  },
  "mappings": {
    "doc": {
      "properties": {
        "@timestamp": {
          "type": "date"
        },
        "@version": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "id": {
          "type": "long"
        },
        "title": {
          "type": "text",
          "analyzer": "ik_max_word"
        },
        "content": {
          "type": "text",
          "analyzer": "ik_max_word"
        },
        "department": {
          "type": "keyword"
        },
        "release_time": {
          "type": "date"
        },
        "del_flag": {
          "type": "long"
        }
      }
    }
  }
}
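If you are not working in Kibana Dev Tools, the same request can be sent with curl; this assumes the request body above is saved as mapping.json and that Elasticsearch is reachable at 192.168.99.100:9200 (the address used in the output section further down):

curl -X PUT "http://192.168.99.100:9200/my_index" -H "Content-Type: application/json" -d @mapping.json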
2. Then open cmd, change to Logstash's bin directory, and run .\logstash -f .\logstash.conf
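(Optionally, the file can be syntax-checked first with .\logstash -f .\logstash.conf --config.test_and_exit.)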
The logstash.conf file is as follows:
input {
  jdbc {
    jdbc_driver_library => "D:\work\mysql-connector-java-5.1.43.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://<your DB host>:<port>/<database name>?useUnicode=true&characterEncoding=utf-8&useSSL=false&serverTimezone=GMT%2B8"
    jdbc_user => "root"
    jdbc_password => "root"
    jdbc_default_timezone => "Asia/Shanghai"
    # track the id column; with use_column_value the statement can reference :sql_last_value for incremental syncs
    tracking_column => "id"
    use_column_value => true
    schedule => "* * * * * *"
    statement => "<your SQL query>"
  }
}

# shift @timestamp to Beijing time (UTC+8)
filter {
  ruby {
    code => "event.set('timestamp', event.get('@timestamp').time.localtime + 8*60*60)"
  }
  ruby {
    code => "event.set('@timestamp', event.get('timestamp'))"
  }
  mutate {
    remove_field => ["timestamp"]
  }
}

output {
  elasticsearch {
    hosts => "192.168.99.100:9200"
    index => "my_index"
    timeout => 300
    document_id => "%{id}"
  }
}
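Once Logstash has run, it is worth checking that the mapping created in step 1 is still in place instead of the Logstash default, for example:

curl -X GET "http://192.168.99.100:9200/my_index/_mapping?pretty"

The title and content fields should still show "analyzer": "ik_max_word" if the custom mapping was kept.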
OK