[下载地址:][1]
[1]: https://www.elastic.co/downloads
首先下载并解压ES:
wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.5.0.tar.gz
tar -zxvf elasticsearch-6.5.0.tar.gz
cd elasticsearch-6.5.0
运行ES:
bin/elasticsearch
报错:
[o.e.b.ElasticsearchUncaughtExceptionHandler] [unknown] uncaught exception in thread [main]
org.elasticsearch.bootstrap.StartupException: java.lang.RuntimeException: can not run elasticsearch as root
at org.elasticsearch.bootstrap.Elasticsearch.init(Elasticsearch.java:140) ~[elasticsearch-6.5.0.jar:6.5.0]
at org.elasticsearch.bootstrap.Elasticsearch.execute(Elasticsearch.java:127) ~[elasticsearch-6.5.0.jar:6.5.0]
at org.elasticsearch.cli.EnvironmentAwareCommand.execute(EnvironmentAwareCommand.java:86) ~[elasticsearch-6.5.0.jar:6.5.0]
at org.elasticsearch.cli.Command.mainWithoutErrorHandling(Command.java:124) ~[elasticsearch-cli-6.5.0.jar:6.5.0]
at org.elasticsearch.cli.Command.main(Command.java:90) ~[elasticsearch-cli-6.5.0.jar:6.5.0]
at org.elasticsearch.bootstrap.Elasticsearch.main(Elasticsearch.java:93) ~[elasticsearch-6.5.0.jar:6.5.0]
at org.elasticsearch.bootstrap.Elasticsearch.main(Elasticsearch.java:86) ~[elasticsearch-6.5.0.jar:6.5.0]
Caused by: java.lang.RuntimeException: can not run elasticsearch as root
at org.elasticsearch.bootstrap.Bootstrap.initializeNatives(Bootstrap.java:103) ~[elasticsearch-6.5.0.jar:6.5.0]
at org.elasticsearch.bootstrap.Bootstrap.setup(Bootstrap.java:170) ~[elasticsearch-6.5.0.jar:6.5.0]
at org.elasticsearch.bootstrap.Bootstrap.init(Bootstrap.java:333) ~[elasticsearch-6.5.0.jar:6.5.0]
at org.elasticsearch.bootstrap.Elasticsearch.init(Elasticsearch.java:136) ~[elasticsearch-6.5.0.jar:6.5.0]
... 6 more
主要看这句: can not run elasticsearch as root
不能以root身份运行。
那我们把目录的所有者改为一个普通用户(这里以 hero 为例):
chown -R hero:hero elasticsearch-6.5.0
然后切换到该用户(su hero)再运行,成功。访问: http://127.0.0.1:9200/
{
name: "K9td8Q0",
cluster_name: "elasticsearch",
cluster_uuid: "wVMgbOZKQ_GlwdDSDpP2Iw",
version: {
number: "6.5.0",
build_flavor: "default",
build_type: "tar",
build_hash: "816e6f6",
build_date: "2018-11-09T18:58:36.352602Z",
build_snapshot: false,
lucene_version: "7.5.0",
minimum_wire_compatibility_version: "5.6.0",
minimum_index_compatibility_version: "5.0.0"
},
tagline: "You Know, for Search"
}
接着安装logstash:
wget https://artifacts.elastic.co/downloads/logstash/logstash-6.5.0.tar.gz
tar -zxvf logstash-6.5.0.tar.gz
cd logstash-6.5.0
解压完进入目录,然后创建配置文件,
这里我存放的路径直接放到了config下:
vim my.conf
input {
jdbc {
#驱动绝对路径
jdbc_driver_library => "/root/mysql-connector-java-5.1.43-bin.jar"
#驱动类名
jdbc_driver_class => "com.mysql.jdbc.Driver"
#连接池配置, mysql数据库连接 DAHLIA 为数据库名
jdbc_connection_string => "jdbc:mysql://10.15.206.203:3306/DAHLIA"
#连接池配置, 使用前是否验证连接
jdbc_validate_connection => true
#连接池配置, 数据库用户名
jdbc_user => "foo"
#连接池配置, 数据库密码
jdbc_password => "bar"
#连接池配置, 是否启用分页, 启用后,需要使用 jdbc_page_size 设置每次查询的结果集大小
jdbc_paging_enabled => true
#连接池配置, 每次查询的结果集大小, 必须设置 jdbc_paging_enabled 为 true 才有效
jdbc_page_size => "1000"
#查询结果集的语句
statement => "select * from tablename where update_time >:sql_last_value"
#使用增量列值,而不是时间戳; 默认值为false
#use_column_value => true
#当设置 use_column_value 为true时(不跟踪时间戳查询), 将跟踪此列值
#tracking_column => "update_time"
#不设置时,默认值为 numeric; 可选 numeric, timestamp
tracking_column_type => "numeric"
#每分钟执行一次
schedule => "* * * * *"
last_run_metadata_path => "syncpoint_table" ## 记录最后一条记录的值,可能是id值,也可能是最后一条记录的日期
}
}
filter {
date {
locale => "zh"
timezone => "Asia/Shanghai"
# match => [ "update_time","ISO8601","yyyy-MM-dd HH:mm:ss.SSS", "UNIX" ]
match => ["timestamp","dd/MMM/yyyy:HH:mm:ss Z"]
# target => "@timestamp"
}
# ruby {
# code => "event.timestamp.time.localtime + 8*60*60"
# }
}
output {
stdout{ codec => json_lines }
elasticsearch {
hosts => ["localhost:9200"]
# flush_size => 5000
# index => "monitor-%{+YYYY.MM.dd}" ## kibana的检索index
index => "ai_web"    # ES索引名称
document_type => "primary_data"    # ES type名称;运行logstash后,ES会自动创建对应的index和type
document_id => "%{id}"
# idle_flush_time => 10
}
}
运行前先安装 logstash 插件 logstash-input-jdbc:
bin/logstash-plugin install --no-verify logstash-input-jdbc
接着运行logstash:
bin/logstash -f /data/logstash-6.5.0/config/my.conf
这里主要是将数据库表 primary_data 中的数据同步到ES
最后我们安装Kibana:
下载解压步骤就不说了:
直接配置:
vim config/kibana.yml
# 将默认配置改成如下:
server.port: 5601
server.host: "0.0.0.0"    # 不限定ip访问(注意:YAML 行内注释的 # 前必须有空格,否则会被当作值的一部分导致配置解析出错)
elasticsearch.url: "http://192.168.2.41:9200"    # es地址
kibana.index: ".kibana"
启动:
bin/kibana
后台运行:
nohup bin/kibana > kibana_run.log 2>&1 &
启动后打开浏览器访问 http://127.0.0.1:5601 浏览 kibana 界面