Windows ELK8 测试: filebeat8+kafka+logstash8+elasticsearch8+kibana8 搭建日志系统
1、程序版本
2、架构示意图
3、下载地址
filebeat 【官网】
logstash【官网】
elasticsearch【官网】
kibana【官网】
kafka【请自行百度】
4、配置elasticsearch:elasticsearch-8.14.3\config\elasticsearch.yml 并启动
cluster.name: my-application
network.host: 0.0.0.0
http.port: 9200
xpack.security.enabled: false
xpack.security.enrollment.enabled: false
xpack.security.http.ssl:
enabled: false
keystore.path: certs/http.p12
xpack.security.transport.ssl:
enabled: false
verification_mode: certificate
keystore.path: certs/transport.p12
truststore.path: certs/transport.p12
cluster.initial_master_nodes: ["LAPTOP-EA485IM6"]
http.host: 0.0.0.0
启动:elasticsearch-8.14.3\bin\elasticsearch.bat
5、配置kibana:kibana-8.14.3\config\kibana.yml, 并启动
server.port: 5601
server.host: "localhost"
elasticsearch.hosts: ["http://127.0.0.1:9200"]
i18n.locale: "zh-CN"
启动:kibana-8.14.3\bin\kibana.bat
6、配置kafka 并启动
【请自行百度】
7、配置filebeat:filebeat-8.14.3\filebeat.yml, 并启动
filebeat.inputs:
- type: log
enabled: true
paths:
- C:\eclipse-workspace\demo18\logs\*
multiline.pattern: '^[[:space:]]'
multiline.negate: false
multiline.match: after
processors:
- add_host_metadata:
when.not.contains.tags: forwarded
# 可选:添加更多处理器,如添加字段、重命名等
output.kafka:
enabled: true
hosts: ["127.0.0.1:9092"]
topic: "demo18"
启动:filebeat-8.14.3\filebeat.exe
注意:请修改成自己的日志文件目录
8、配置logstash:logstash-8.14.3\config\logstash.conf, 并启动
input {
kafka {
bootstrap_servers => "127.0.0.1:9092"
topics => ["demo18"]
codec => "json" # 假设 Kafka 消息是 JSON 格式
decorate_events => true # 启用元数据装饰,添加 Kafka 相关信息
}
}
filter {
# 提取并平展嵌套的字段
mutate {
rename => { "[@metadata][beat]" => "beat" }
rename => { "[@metadata][type]" => "doc_type" }
rename => { "[@metadata][version]" => "filebeat_version" }
rename => { "[ecs][version]" => "ecs_version" }
rename => { "[log][offset]" => "log_offset" }
rename => { "[log][file][path]" => "log_file_path" }
rename => { "[input][type]" => "input_type" }
rename => { "[host][name]" => "host_name" }
rename => { "[agent][id]" => "agent_id" }
rename => { "[agent][name]" => "agent_name" }
rename => { "[agent][type]" => "agent_type" }
rename => { "[agent][version]" => "agent_version" }
rename => { "[agent][ephemeral_id]" => "agent_ephemeral_id" }
rename => { "[@metadata][kafka][timestamp]" => "kafka_timestamp" }
}
# 提取 message 字段中的具体信息
grok {
match => {
"message" => "%{TIMESTAMP_ISO8601:log_timestamp} %{LOGLEVEL:log_level} \[%{DATA:thread}\] %{JAVACLASS:logger} \[%{JAVACLASS:class} \: %{NUMBER:line_number}\] - %{GREEDYDATA:log_message}"
}
}
# 使用 Kafka 的时间戳作为 @timestamp,如果有提供的话
# 注意:Kafka 元数据时间戳是毫秒级 Unix 时间戳(如 1722416593544),需用 UNIX_MS 解析,否则会产生 _dateparsefailure
date {
match => [ "kafka_timestamp", "UNIX_MS" ]
target => "@timestamp"
}
# 如果 Kafka 时间戳不可用,回退使用日志的时间戳
date {
match => [ "log_timestamp", "ISO8601" ]
target => "@timestamp"
add_field => { "used_fallback_timestamp" => "true" }
}
# 移除不必要的原始字段
mutate {
remove_field => ["@metadata"]
remove_field => ["event"]
remove_field => ["input"]
remove_field => ["log"]
remove_field => ["ecs"]
remove_field => ["agent"]
}
}
output {
elasticsearch {
hosts => ["http://127.0.0.1:9200"]
index => "logstash-%{+YYYY.MM.dd}"
}
stdout {
codec => rubydebug
}
}
启动:.\bin\logstash.bat -f config\logstash.conf
9、查看kafka是否收到日志
kafka Message如下:
{
"@timestamp": "2024-07-31T08:13:08.809Z",
"@metadata": {
"beat": "filebeat",
"type": "_doc",
"version": "8.14.3"
},
"input": {
"type": "log"
},
"ecs": {
"version": "8.0.0"
},
"host": {
"name": "laptop-ea485im6",
"id": "a0d0c7a2-9e8c-44cd-be76-012a876dc15b",
"ip": [
"fe99::a4e3:77c2:edb0:e11b",
"16.254.49.93"
],
"mac": [
"00-50-56-C0-00-01"
],
"hostname": "laptop-ea485im6",
"architecture": "x86_64",
"os": {
"version": "10.0",
"family": "windows",
"name": "Windows 11 Pro",
"kernel": "10.0.22621.3880 (WinBuild.160101.0800)",
"build": "22631.3880",
"type": "windows",
"platform": "windows"
}
},
"agent": {
"name": "LAPTOP-EA121IM8",
"type": "filebeat",
"version": "8.14.3",
"ephemeral_id": "88178fcc-3214-1234-97db-8ba06fcc0786",
"id": "ed82c66e-be8b-1996-8888-5f321dc72478"
},
"log": {
"offset": 783466,
"file": {
"path": "eclipse-workspace\\demo18\\logs\\all.log"
}
},
"message": "2024-07-31 16:13:08.685 ERROR [http-nio-8818-exec-1] c.e.d.elk.controller.ElkController [ElkController.java : 30] - null"
}
10、查看elasticsearch是否收到日志:http://localhost:5601/
Elasticsearch Message如下:
{
"took": 698,
"timed_out": false,
"_shards": {
"total": 1,
"successful": 1,
"skipped": 0,
"failed": 0
},
"hits": {
"total": {
"value": 5040,
"relation": "eq"
},
"max_score": null,
"hits": [
{
"_index": "logstash-2024.07.31",
"_id": "ZmgHCJEB8hjbTi-Efq_S",
"_score": null,
"_source": {
"class": "ElkController.java",
"ecs_version": "8.0.0",
"agent_version": "8.14.3",
"kafka_timestamp": 1722416593544,
"host_name": "laptop-ea485im6",
"log_file_path": ""eclipse-workspace\demo18\logs\all.log""",
"doc_type": "_doc",
"agent_name": "LAPTOP-EA885IM6",
"agent_ephemeral_id": "88178fcc-0317-4190-97db-8ba06fcc0786",
"input_type": "log",
"line_number": "30",
"log_timestamp": "2024-07-31 17:03:13.173",
"beat": "filebeat",
"log_level": "ERROR",
"logger": "c.e.d.elk.controller.ElkController",
"tags": [
"_dateparsefailure"
],
"used_fallback_timestamp": "true",
"host": {
"mac": [
"11-22-56-C0-11-01"
],
"ip": [
"fe80::a4e2:88c2:edb0:e00b",
"16.254.49.93"
],
"hostname": "laptop-ea123im6",
"id": "a0d0c7a2-9e8c-44cd-be76-012a876dc15b",
"architecture": "x86_64",
"os": {
"build": "22631.3880",
"type": "windows",
"platform": "windows",
"name": "Windows 11 Pro",
"kernel": "10.0.22621.3880 (WinBuild.160101.0800)",
"version": "10.0",
"family": "windows"
}
},
"log_message": "null",
"log_offset": 1566877,
"agent_id": "ed82c66e-be8b-4995-9b56-5f321dc72478",
"filebeat_version": "8.14.3",
"agent_type": "filebeat",
"@version": "1",
"thread": "http-nio-8818-exec-1",
"@timestamp": "2024-07-31T09:03:13.173Z",
"message": "2024-07-31 17:03:13.173 ERROR [http-nio-8818-exec-1] c.e.d.elk.controller.ElkController [ElkController.java : 30] - null"
},
"sort": [
1722416593173
]
}
]
}
}
11、现在可以结合kibana可视化图表开始你的日志分析之旅了
先将索引创建为数据视图,然后统计每秒告警数【示例】