Setting up an ELK Log Platform

The log platform will be deployed as two sets, one for the scrm test environment and one for production, with Logstash acting as the intermediary.
This article integrates logback into the project as the Logstash input; Filebeat can also be used as an alternative input (covered later).

Installing Elasticsearch

# Pull the image
docker pull elasticsearch:7.7.0

# Start the container
docker run --name elasticsearch -d -e ES_JAVA_OPTS="-Xms512m -Xmx512m" -e "discovery.type=single-node" -p 9200:9200 -p 9300:9300 elasticsearch:7.7.0
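If the container started correctly, a request against port 9200 should return the cluster information (a quick check, assuming you run it on the Docker host itself):

curl http://localhost:9200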

Installing Kibana

docker pull kibana:7.5.1

# Create the config file in this directory
root@xiaoxinpro13:/usr/local/kibana/conf# ls
kibana.yml
root@xiaoxinpro13:/usr/local/kibana/conf# pwd
/usr/local/kibana/conf
root@xiaoxinpro13:/usr/local/kibana/conf# 


# File contents
#
# ** THIS IS AN AUTO-GENERATED FILE **
#

# Default Kibana configuration for docker target
server.name: kibana
server.host: "0"
elasticsearch.hosts: [ "http://192.168.31.190:9200" ]
xpack.monitoring.ui.container.elasticsearch.enabled: true

Start the container

docker run -d \
  --name=kibana \
  --restart=always \
  -p 5601:5601 \
  -v /usr/local/kibana/conf/kibana.yml:/usr/share/kibana/config/kibana.yml \
  kibana:7.5.1
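Once the container is up, Kibana serves its UI on http://<host>:5601; its status endpoint can be used as a quick health check (assuming port 5601 is reachable from where you run the command):

curl http://localhost:5601/api/status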

Project integration approach

Add the dependency to the project:

<dependency>
    <groupId>net.logstash.logback</groupId>
    <artifactId>logstash-logback-encoder</artifactId>
    <version>5.3</version>
</dependency>

Configure logback

The log output for each environment can be separated with springProfile name="test" (and similar) sections in logback-spring.xml.

<appender name="LOGSTASH_TEST" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
    <destination>172.16.6.xxx:9600</destination>
    <!-- Log output encoding -->
    <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
        <!-- The value of the appname custom field is used as the index name -->
        <customFields>{"appname":"scrm-log-web"}</customFields>
        <providers>
            <timestamp>
                <timeZone>UTC</timeZone>
            </timestamp>
            <pattern>
                <pattern>
                    {
                    "severity": "%level",
                    "service": "${springAppName:-}",
                    "port": "${serverPort:-}",
                    "trace": "%X{X-B3-TraceId:-}",
                    "span": "%X{X-B3-SpanId:-}",
                    "exportable": "%X{X-Span-Export:-}",
                    "pid": "${PID:-}",
                    "thread": "%thread",
                    "class": "%logger{40}",
                    "rest": "%message"
                    }
                </pattern>
            </pattern>
        </providers>
    </encoder>
</appender>


<root level="info">
    <springProfile name="test">
        <appender-ref ref="LOGSTASH_TEST"/>
    </springProfile>
</root>
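The ${springAppName:-} and ${serverPort:-} placeholders used in the pattern above are not defined by logback itself. A common way to supply them (a sketch, assuming the application exposes the standard spring.application.name and server.port properties) is to declare them near the top of logback-spring.xml:

<!-- expose Spring properties to logback; source property names are assumptions -->
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<springProperty scope="context" name="serverPort" source="server.port"/>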

The destination configured above is the port on which the Logstash service (TCP input) listens.

Installing Logstash

After downloading, simply extract the archive.
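For example (assuming the standard Elastic artifact URL for version 7.7.0):

wget https://artifacts.elastic.co/downloads/logstash/logstash-7.7.0.tar.gz
tar -zxvf logstash-7.7.0.tar.gz -C /usr/local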

[root@izbp14j2amyqhhh99qufi7z logstash-7.7.0]# pwd
/usr/local/logstash-7.7.0
[root@izbp14j2amyqhhh99qufi7z logstash-7.7.0]# 

Logstash configuration file
Go to /usr/local/logstash-7.7.0/config and create a new configuration file named scrm-test-log.conf, as follows:

[root@izbp14j2amyqhhh99qufi7z logstash-7.7.0]# cd config/
[root@izbp14j2amyqhhh99qufi7z config]# pwd
/usr/local/logstash-7.7.0/config
[root@izbp14j2amyqhhh99qufi7z config]# ls
jvm.options  log4j2.properties  logstash-sample.conf  logstash.yml  pipelines.yml  scrm-test-log.conf  startup.options
[root@izbp14j2amyqhhh99qufi7z config]# 

The scrm-test-log.conf configuration file:

input {
  tcp {
    port => 9600
    codec => json_lines
  }
}

output {
  elasticsearch {
    hosts => ["http://172.16.6.xxx:9200","http://172.16.6.xxx:9200"]
    # the index name is built from the appname custom field sent by the logback encoder
    index => "%{[appname]}-%{+YYYY-MM-dd}"
  }
  stdout {
    codec => rubydebug            # also print each event to the console
  }
}

Go to /usr/local/logstash-7.7.0/bin:

[root@izbp14j2amyqhhh99qufi7z bin]# pwd
/usr/local/logstash-7.7.0/bin
[root@izbp14j2amyqhhh99qufi7z bin]# ls
benchmark.sh         ingest-convert.sh  logstash-keystore      logstash.log         pqcheck   setup.bat
cpdump               logstash           logstash-keystore.bat  logstash-plugin      pqrepair  system-install
dependencies-report  logstash.bat       logstash.lib.sh        logstash-plugin.bat  ruby
[root@izbp14j2amyqhhh99qufi7z bin]# 

Starting Logstash

Start in the foreground for testing:
./logstash -f /usr/local/logstash-7.7.0/config/scrm-test-log.conf
Start in the background:
nohup ./logstash -f /usr/local/logstash-7.7.0/config/scrm-test-log.conf > ./logstash.log &
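Before starting, the pipeline file can be syntax-checked, and once Logstash is up the TCP input can be smoke-tested by sending a single JSON line (a minimal check, assuming Logstash runs on the local machine and nc is installed):

# validate the configuration without starting the pipeline
./logstash -f /usr/local/logstash-7.7.0/config/scrm-test-log.conf --config.test_and_exit

# send one JSON event to the tcp/json_lines input; it should show up in the stdout output and in ES
echo '{"appname":"scrm-log-web","message":"hello from nc"}' | nc localhost 9600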

The Filebeat approach

Install Logstash as described above.
In addition, install Filebeat on every server whose logs need to be collected.
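For example (assuming the 7.7.0 Linux x86_64 tarball from the standard Elastic artifact URL, to match the rest of the stack):

wget https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-7.7.0-linux-x86_64.tar.gz
tar -zxvf filebeat-7.7.0-linux-x86_64.tar.gz
mv filebeat-7.7.0-linux-x86_64 /usr/local/filebeat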

[root@izbp1eyzqud3anqii8dh2tz ~]# cd /usr/local/filebeat
[root@izbp1eyzqud3anqii8dh2tz filebeat]# ls
fields.yml  filebeat  filebeat.reference.yml  filebeat.yml  kibana  LICENSE.txt  module  modules.d  NOTICE.txt  README.md
[root@izbp1eyzqud3anqii8dh2tz filebeat]# pwd
/usr/local/filebeat
[root@izbp1eyzqud3anqii8dh2tz filebeat]# 

Configure filebeat.yml

filebeat.inputs:
- type: log
  id: my-filestream-id
  enabled: true
  paths:
    - /data/logs/scrm/log_info.log  # log files to read
    - /data/logs/scrm/log_error.log
  tags: ["scrm-web-log"]

output.logstash:
  # The Logstash hosts
  hosts: ["172.16.143.xxx:5044"]
# Comment out the default Elasticsearch output
# output.elasticsearch:
#   # Array of hosts to connect to.
#   hosts: ["localhost:9200"]




# Start in the background
nohup ./filebeat -e -c filebeat.yml > ./filebeat.log &
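Before leaving Filebeat running in the background, the configuration and the connection to Logstash can be verified with Filebeat's built-in test subcommands:

./filebeat test config -c filebeat.yml
./filebeat test output -c filebeat.yml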

Configure Logstash

input {
  beats {
    port => 5044
  }
}

filter {
  if "scrm-web-log" in [tags][0] {

      grok {
          match => { "message" => "%{TIMESTAMP_ISO8601:time} \s*%{DATA:thread}\s* %{LOGLEVEL:level}\s* %{GREEDYDATA:data}" }
      }

      date {
          # parse the ISO8601 timestamp extracted by grok into @timestamp
          match => ["time", "ISO8601"]
          target => "@timestamp"
          timezone => "Asia/Shanghai"
      }

      mutate {
          add_field => { "target_index" => "scrm-web-log-%{+YYYY.MM.dd}" }
      }

  } else if "scrm-marketing-log" in [tags][0] {

      grok {
          match => { "message" => "%{TIMESTAMP_ISO8601:time} \s*%{DATA:thread}\s* %{LOGLEVEL:level}\s* %{GREEDYDATA:data}" }
      }

      date {
          match => ["time", "ISO8601"]
          target => "@timestamp"
          timezone => "Asia/Shanghai"
      }

      mutate {
          remove_field => [ "message", "agent", "tags", "input", "ecs", "@version", "host", "os", "container", "log" ]
          add_field => { "target_index" => "scrm-marketing-log-%{+YYYY.MM.dd}" }
      }
  } else if "scrm-sync-log" in [tags][0] {

      grok {
          match => { "message" => "%{TIMESTAMP_ISO8601:time} \s*%{DATA:thread}\s* %{LOGLEVEL:level}\s* %{GREEDYDATA:data}" }
      }

      date {
          match => ["time", "ISO8601"]
          target => "@timestamp"
          timezone => "Asia/Shanghai"
      }

      mutate {
          remove_field => [ "message", "agent", "tags", "input", "ecs", "@version", "host", "os", "container", "log" ]
          add_field => { "target_index" => "scrm-sync-log-%{+YYYY.MM.dd}" }
      }
  }
}

output {
  elasticsearch {
    hosts => ["http://172.16.6.156:9200"]
    # route each event to the index name set by the filter above
    index => "%{[target_index]}"
  }
}

A general-purpose grok expression

Grok expressions can be tested with an online grok debugger.

%{TIMESTAMP_ISO8601:time} \s*%{DATA:thread}\s* %{LOGLEVEL:level}\s* %{GREEDYDATA:data}
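As an illustration, a hypothetical log line in the usual logback layout would be split by this expression into time, thread, level and data fields:

2022-05-30 11:56:01.123 [http-nio-8080-exec-1] INFO com.scrm.web.DemoService - handle request ok

# extracted fields (roughly)
# time   = 2022-05-30 11:56:01.123
# thread = [http-nio-8080-exec-1]
# level  = INFO
# data   = com.scrm.web.DemoService - handle request ok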

Configuring an ES index template

Since the project logs of the various scrm modules all map to the same index structure in ES, and the log index names end with the module name plus a date, an index template is created so that new log indexes are recognized automatically and old ones can be deleted by date.

Create it in the Kibana Dev Tools console.

PUT _template/scrm_test_template_default
{
  "index_patterns":[
    "scrm-log*"
  ],
  "settings":{
    "number_of_replicas":1,
    "refresh_interval":"10s",
    "index.search.slowlog.threshold.query.warn": "10s",
    "index.search.slowlog.threshold.query.info": "5s",
    "index.search.slowlog.threshold.query.debug": "2s",
    "index.search.slowlog.threshold.query.trace": "500ms"
  },
 "mappings" : {
      "properties" : {
        "@timestamp" : {
          "type" : "date"
        },
        "@version" : {
          "type" : "text",
          "fields" : {
            "keyword" : {
              "type" : "keyword",
              "ignore_above" : 256
            }
          }
        },
        "appname" : {
          "type" : "text",
          "fields" : {
            "keyword" : {
              "type" : "keyword",
              "ignore_above" : 256
            }
          }
        },
        "corpid" : {
          "type" : "text"
        },
        "host" : {
          "type" : "text",
          "fields" : {
            "keyword" : {
              "type" : "keyword",
              "ignore_above" : 256
            }
          }
        },
        "id" : {
          "type" : "text"
        },
        "level" : {
          "type" : "text",
          "fields" : {
            "keyword" : {
              "type" : "keyword",
              "ignore_above" : 256
            }
          }
        },
        "level_value" : {
          "type" : "long"
        },
        "logger_name" : {
          "type" : "text",
          "fields" : {
            "keyword" : {
              "type" : "keyword",
              "ignore_above" : 256
            }
          }
        },
        "message" : {
          "type" : "text",
          "fields" : {
            "keyword" : {
              "type" : "keyword",
              "ignore_above" : 256
            }
          }
        },
        "orderNum" : {
          "type" : "text"
        },
        "port" : {
          "type" : "long"
        },
        "thread_name" : {
          "type" : "text",
          "fields" : {
            "keyword" : {
              "type" : "keyword",
              "ignore_above" : 256
            }
          }
        }
      }
    },
  "order":0
}

GET _template/scrm_test_template_default

Once it is created, every index whose name starts with scrm-log will be matched by this template.

Note: after creating the index template, you still need to create an index pattern in Kibana.
The fields that the index pattern will map can be checked beforehand by issuing a GET against the corresponding index:

GET xxx
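For example, the mapping of an existing log index can be inspected in Dev Tools (the index name below is only illustrative):

GET scrm-log-web-*/_mapping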

Configuring index deletion
Go to the following path and create a scrm-test-index-clear.sh file:

[root@izbp14j2amyqhhh99qufi7z bin]# cd /usr/local/script/es/
[root@izbp14j2amyqhhh99qufi7z es]# ls
scrm-test-index-clear.sh
[root@izbp14j2amyqhhh99qufi7z es]# 

Grant execute permission:

[root@izbp14j2amyqhhh99qufi7z es]# ll
total 4
-rw-r--rwx 1 root root 599 May 30 11:56 scrm-test-index-clear.sh
[root@izbp14j2amyqhhh99qufi7z es]# chmod +7 scrm-test-index-clear.sh 

The index deletion script:

#!/bin/bash
# es-index-clear
# keep only the last few days of log indexes; delete the indexes from 5 days ago
THIS_DAYS_AGO=`date "+%Y-%m-%d"`
THREE_DAYS_AGO=`date -d "-3 days" "+%Y-%m-%d"`
FIVE_DAYS_AGO=`date -d "-5 days" "+%Y-%m-%d"`

echo "删除索引开始..."

curl -XDELETE "http://172.16.143.235:9200/scrm-log-web-${FIVE_DAYS_AGO}"
curl -XDELETE "http://172.16.143.235:9200/scrm-log-sync-${FIVE_DAYS_AGO}"
curl -XDELETE "http://172.16.143.235:9200/scrm-log-marketing-${FIVE_DAYS_AGO}"
curl -XDELETE "http://172.16.143.235:9200/scrm-log-gateway-${FIVE_DAYS_AGO}"

echo "删除索引结束"

Note: to test, you can temporarily set the date in the script to today and run it directly.

[root@izbp14j2amyqhhh99qufi7z es]# sh scrm-test-index-clear.sh 
删除索引开始...
{"error":{"root_cause":[{"type":"index_not_found_exception","reason":"no such index [scrm-log-web-2022-05-25]","index_uuid":"_na_","resource.type":"index_or_alias","resource.id":"scrm-log-web-2022-05-25","index":"scrm-log-web-2022-05-25"}],"type":"index_not_found_exception","reason":"no such index [scrm-log-web-2022-05-25]","index_uuid":"_na_","resource.type":"index_or_alias","resource.id":"scrm-log-web-2022-05-25","index":"scrm-log-web-2022-05-25"},"status":404}curl: (6) Could not resolve host: n; Unknown error
{"error":{"root_cause":[{"type":"index_not_found_exception","reason":"no such index [scrm-log-sync-2022-05-25]","index_uuid":"_na_","resource.type":"index_or_alias","resource.id":"scrm-log-sync-2022-05-25","index":"scrm-log-sync-2022-05-25"}],"type":"index_not_found_exception","reason":"no such index [scrm-log-sync-2022-05-25]","index_uuid":"_na_","resource.type":"index_or_alias","resource.id":"scrm-log-sync-2022-05-25","index":"scrm-log-sync-2022-05-25"},"status":404}curl: (6) Could not resolve host: n; Unknown error
{"error":{"root_cause":[{"type":"index_not_found_exception","reason":"no such index [scrm-log-marketing-2022-05-25]","index_uuid":"_na_","resource.type":"index_or_alias","resource.id":"scrm-log-marketing-2022-05-25","index":"scrm-log-marketing-2022-05-25"}],"type":"index_not_found_exception","reason":"no such index [scrm-log-marketing-2022-05-25]","index_uuid":"_na_","resource.type":"index_or_alias","resource.id":"scrm-log-marketing-2022-05-25","index":"scrm-log-marketing-2022-05-25"},"status":404}curl: (6) Could not resolve host: n; Unknown error
{"error":{"root_cause":[{"type":"index_not_found_exception","reason":"no such index [scrm-log-gateway-2022-05-25]","index_uuid":"_na_","resource.type":"index_or_alias","resource.id":"scrm-log-gateway-2022-05-25","index":"scrm-log-gateway-2022-05-25"}],"type":"index_not_found_exception","reason":"no such index [scrm-log-gateway-2022-05-25]","index_uuid":"_na_","resource.type":"index_or_alias","resource.id":"scrm-log-gateway-2022-05-25","index":"scrm-log-gateway-2022-05-25"},"status":404}curl: (6) Could not resolve host: n; Unknown error
删除索引结束
[root@izbp14j2amyqhhh99qufi7z es]# 

Configuring the scheduled task
Note: the path should point to your own shell script.

Run the command:

[root@izbp14j2amyqhhh99qufi7z es]# crontab -e
crontab: no changes made to crontab
[root@izbp14j2amyqhhh99qufi7z es]# 

Add the following entry (it runs every day at 01:00):

0 1 * * * /usr/local/script/es/scrm-test-index-clear.sh
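To keep a record of what the nightly cleanup deletes, the same entry can redirect its output to a log file (the log path is just an example):

0 1 * * * /usr/local/script/es/scrm-test-index-clear.sh >> /usr/local/script/es/index-clear.log 2>&1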

