安装
1.安装docker
yum install -y docker-ce
2.docker拉取 ELK(Elasticsearch、Logstash、Kibana)
docker pull elasticsearch:7.4.2
docker pull kibana:7.4.2
docker pull logstash:7.4.2
3.挂载目录(按需)
#elasticsearch
mkdir -p /mydata/elasticsearch/config
mkdir -p /mydata/elasticsearch/data
mkdir -p /mydata/elasticsearch/logs
echo "http.host: 0.0.0.0" >> /mydata/elasticsearch/config/elasticsearch.yml
chmod -R 777 /mydata/elasticsearch
#kibana
mkdir -p /mydata/kibana/config
mkdir -p /mydata/kibana/data
chmod -R 777 /mydata/kibana
#logstash
mkdir -p /mydata/logstash/config
mkdir -p /mydata/logstash/data
chmod -R 777 /mydata/logstash
4.启动
准备工作
1.kibana设置汉化(按需)
kibana.yml文件添加 i18n.locale: "zh-CN" (注意必须使用英文直引号，中文引号会导致配置解析失败)
2.logstash确保有这四个文件夹
3.driver文件夹放入对应sql包
4.进入/mydata/logstash/config
编辑 logstash.yml
node.name: logstash-1
path.logs: /usr/share/logstash/logs
config.test_and_exit: false
config.reload.automatic: false
config.reload.interval: 60s
config.debug: true
log.level: debug
http.host: 0.0.0.0
编辑 logstash.conf 设置数据增量同步 注:确保你的索引在es已创建
input {
stdin {
}
jdbc {
jdbc_connection_string => "jdbc:mysql://你的数据库ip:3306/table?characterEncoding=UTF-8&useUnicode=true&useSSL=false&zeroDateTimeBehavior=convertToNull&serverTimezone=Asia/Shanghai&allowMultiQueries=true"
jdbc_user => "你的账号"
jdbc_password => "你的密码"
jdbc_driver_library => "/mydata/logstash/driver/mysql-connector-java-5.1.27.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
jdbc_paging_enabled => "true"
jdbc_page_size => "300000"
statement => "SELECT
id,
name,
age,
update_time
FROM
student
WHERE
update_time > :sql_last_value"
schedule => "* * * * *"
use_column_value => true
tracking_column_type => "timestamp"
tracking_column => "update_time"
record_last_run => true
jdbc_default_timezone => "Asia/Shanghai"
}
}
output {
elasticsearch {
hosts => ["你的服务器ip:9200"]
index => "你的索引"
document_id => "%{id}"
}
stdout {
codec => json_lines
}
}
编辑 pipelines.yml 指定sql读取文件
- pipeline.id: main
path.config: "/usr/share/logstash/config/logstash.conf"
编辑 jvm.options 设置大小
-Xmx1024m
-Xms512m
按挂载目录在docker容器依次启动Elasticsearch、kibana、logstash命令
1.启动Elasticsearch
docker run --name elasticsearch -p 9200:9200 -p 9300:9300 \
-e "discovery.type=single-node" \
-e ES_JAVA_OPTS="-Xms64m -Xmx512m" \
-v /mydata/elasticsearch/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml \
-v /mydata/elasticsearch/data:/usr/share/elasticsearch/data \
-v /mydata/elasticsearch/plugins:/usr/share/elasticsearch/plugins \
-d elasticsearch:7.4.2
2.启动Kibana
docker run --name kibana -e ELASTICSEARCH_HOSTS=http://你的服务器IP:9200 -p 5601:5601 \
-d kibana:7.4.2
3.启动Logstash
docker run --name logstash \
-v /mydata/logstash/config:/usr/share/logstash/config \
-v /mydata/logstash/data:/usr/share/logstash/data \
-v /mydata/logstash/pipeline:/usr/share/logstash/pipeline \
-v /mydata/logstash/driver:/usr/share/logstash/driver \
-d logstash:7.4.2
5.启动完成
打开192.168.56.10:5601 kibana界面
索引操作命令
PUT /test_index //创建索引(注:ES索引名必须全小写，不能包含大写字母)
GET /test_index //获取索引
GET test_index/_search //查询索引数据
{
"query": {
"match_all": {}
},
"size": 96 //查询条数
}
DELETE test_index //删除索引
代码手动导入示例
public static void main(String[] args) throws IOException {
try {
Class.forName("com.mysql.cj.jdbc.Driver");
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
// 设置Elasticsearch连接信息
RestHighLevelClient client = new RestHighLevelClient(
RestClient.builder(new HttpHost("es所在服务器ip", 9200, "http")));
// 连接MySQL数据库
try (Connection conn = DriverManager.getConnection("jdbc:mysql://数据库ip:3306/student", "root", "root")) {
Statement stmt = conn.createStatement();
List<Map<String, Object>> resultList = new ArrayList<>();
ResultSet rs = stmt.executeQuery("
SELECT
id,
age,
name,
update_time
FROM
student");
// 将数据写入Elasticsearch中
while (rs.next()) {
Map<String, Object> rowData = new HashMap<>();
rowData.put("id", rs.getString("id"));
rowData.put("age", rs.getBigDecimal("age"));
rowData.put("updateTime ", rs.getDate("update_time "));
rowData.put("name", rs.getString("name"));
resultList.add(rowData);
}
BulkRequest bulkRequest = new BulkRequest();
resultList.forEach(document -> {
bulkRequest.add(new IndexRequest("testIndex").id(document.get("id").toString()).source(JSON.toJSONString(document), XContentType.JSON));
});
BulkResponse bulk = client.bulk(bulkRequest, ESconfig.COMMON_OPTIONS);
System.out.println(JSON.toJSONString(bulk));
} catch (SQLException e) {
e.printStackTrace();
} finally {
client.close();
}
}
ESconfig.java
import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class ESconfig {
    // Shared request options passed to every high-level REST client call
    // (e.g. client.bulk(request, ESconfig.COMMON_OPTIONS)).
    public static final RequestOptions COMMON_OPTIONS;

    static {
        RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder();
        COMMON_OPTIONS = optionsBuilder.build();
    }

    /**
     * Exposes a singleton high-level REST client pointed at the ES node.
     *
     * @return client bound to 服务器ip:9200 over plain HTTP
     */
    @Bean
    public RestHighLevelClient restHighLevelClient() {
        HttpHost elasticsearchHost = new HttpHost("服务器ip", 9200, "http");
        return new RestHighLevelClient(RestClient.builder(elasticsearchHost));
    }
}