Spring Boot + Logstash + ES + Kibana log service

Install Elasticsearch
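
The three containers below join a user-defined Docker network named es-net so they can reach each other by container name (es, kibana, logstash). If the network does not exist yet, create it first:

docker network create es-net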

docker run -d --name es \
  -e "ES_JAVA_OPTS=-Xms512m -Xmx512m" \
  -e "discovery.type=single-node" \
  -v /opt/elasticsearch/logs:/usr/share/elasticsearch/logs \
  -v /opt/elasticsearch/data:/usr/share/elasticsearch/data \
  -v /opt/elasticsearch/plugins:/usr/share/elasticsearch/plugins \
  --privileged --network es-net \
  -p 9200:9200 -p 9300:9300 \
  elasticsearch:7.12.0
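
The host directories above are assumed to already exist and to be writable by the container user (the official image runs Elasticsearch as uid 1000); if not, something like:

mkdir -p /opt/elasticsearch/{logs,data,plugins}
chown -R 1000:1000 /opt/elasticsearch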

Install Kibana

docker run -d --name kibana \
  -e ELASTICSEARCH_HOSTS=http://es:9200 \
  --network es-net \
  -p 5601:5601 \
  kibana:7.12.0
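
Kibana reaches Elasticsearch through the shared network using the container name es. If the UI on port 5601 does not come up, the container logs are the first place to look:

docker logs -f kibana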

Install Logstash

docker run -d --name logstash \
  --env TZ=Asia/Shanghai \
  -v /opt/logstash/logstash.conf:/usr/share/logstash/pipeline/logstash.conf \
  -p 5044:5044 \
  --network es-net \
  logstash:7.12.0
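
The pipeline configured below also reads an index template from /usr/share/logstash/pipeline/mappings/ and writes files under /usr/share/logstash/pipeline/logs/, so mounting the whole pipeline directory (host layout here is an assumption, adjust to your setup) is often more convenient than mounting a single file:

docker run -d --name logstash \
  --env TZ=Asia/Shanghai \
  -v /opt/logstash/pipeline:/usr/share/logstash/pipeline \
  -v /opt/logstash/logstash.yml:/usr/share/logstash/config/logstash.yml \
  -p 5044:5044 \
  --network es-net \
  logstash:7.12.0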

Set the ES access password

   ./bin/elasticsearch-setup-passwords interactive
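
This command is run inside the ES container, and it only works once security has been enabled (xpack.security.enabled: true in elasticsearch.yml). For example:

docker exec -it es bin/elasticsearch-setup-passwords interactive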

Modify the Kibana configuration (kibana.yml)

server.name: kibana
server.host: "0"
elasticsearch.hosts: [ "http://es:9200" ]
monitoring.ui.container.elasticsearch.enabled: true
xpack.security.enabled: true
elasticsearch.username: "elastic"
elasticsearch.password: "*****"
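
One way to apply the change is to copy the edited file into the running container and restart it (alternatively, mount kibana.yml as a volume in the docker run command above); this assumes the edited kibana.yml is in the current directory:

docker cp kibana.yml kibana:/usr/share/kibana/config/kibana.yml
docker restart kibana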

Modify Logstash

Configuration file: logstash.yml

http.host: "0.0.0.0"
xpack.monitoring.elasticsearch.username: "elastic"
xpack.monitoring.elasticsearch.password: "***"
xpack.monitoring.elasticsearch.hosts: [ "http://es:9200" ]
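
As with Kibana, changes to logstash.yml (or to the pipeline below) take effect after the container is restarted:

docker restart logstash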

Pipeline file: logstash.conf

input {
  tcp {
    mode => "server"
    host => "0.0.0.0"      # accept logs from any host
    port => 5044           # port exposed by Logstash
    codec => json_lines    # one JSON document per line
  }
}

filter {
    ruby {
        # Convert @timestamp into a millisecond epoch timestamp
        code => "event.set('createTime', (event.get('@timestamp').to_f.round(3) * 1000).to_i)"
    }
    ruby {
        # Store the Logstash-generated timestamp shifted by +8 hours in a custom
        # 'timestamp' field, and copy the snake_case fields into camelCase fields
        code => "
          event.set('timestamp', event.get('@timestamp').time.localtime + 8*3600)
          event.set('threadName', event.get('thread_name'))
          event.set('levelValue', event.get('level_value'))
          event.set('loggerName', event.get('logger_name'))
          event.set('callerClassName', event.get('caller_class_name'))
          event.set('callerFileName', event.get('caller_file_name'))
          event.set('callerLineNumber', event.get('caller_line_number'))
          event.set('callerMethodName', event.get('caller_method_name'))
          event.set('stackTrace', event.get('stack_trace'))
        "
    }
    # Optionally overwrite @timestamp with the custom 'timestamp' field
    # (a ruby filter must not be left empty, so the whole block stays commented out):
    # ruby {
    #     code => "event.set('@timestamp', event.get('timestamp'))"
    # }
    mutate {
        # Drop the intermediate field and the original snake_case fields
        remove_field => ["timestamp","thread_name","level_value","logger_name","HOSTNAME","caller_class_name","caller_file_name","caller_line_number","caller_method_name","stack_trace"]
    }
}

output {
  elasticsearch {
      hosts    => ["http://es:9200"]   # Elasticsearch address and port
      user     => "elastic"
      password => "*****"
      index    => "%{[app_name]}"      # index name
      # index  => "application-logs-%{[app_name]}-%{[profiles_active]}-%{+YYYY-MM}"

      # document_type => "_doc"
      codec  => json
      # Whether to create indices from a template, in which field types can be defined in advance
      manage_template => true
      # Index template file
      template => "/usr/share/logstash/pipeline/mappings/application-log-mapping.json"
      template_name => "application-log"
      # Whether to overwrite an existing template in ES with this file when Logstash restarts
      template_overwrite => true
  }
  #stdout {
  #  codec => rubydebug
  #}
  file {
    path => "/usr/share/logstash/pipeline/logs/%{+YYYY-MM-dd}-%{app_name}-%{profiles_active}.log"
  }
}
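
Before wiring up the application, you can smoke-test the pipeline by pushing a JSON line at the TCP input (the field values here are made up; requires netcat, and some nc variants need a flag such as -q 1 to exit after sending):

echo '{"app_name":"crm-log","profiles_active":"prod","message":"hello from nc"}' | nc localhost 5044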

Index template: mappings/application-log-mapping.json

Note that the message field below uses the ik_max_word analyzer, which requires the IK analysis plugin in the mounted plugins directory, and that the template only applies to indices whose names match application-log* (i.e. the commented-out index pattern above, not the bare %{[app_name]} index).

{
 "index_patterns": ["application-log*"],
 "order": 1,
 "mappings" : {
   "dynamic_templates" : [{
     "message_field" : {
       "match" : "message",
       "match_mapping_type" : "string",
       "mapping" : {
         "type" : "keyword",
         "index" : true
       }
     }
   }, {
     "string_fields" : {
       "match" : "*",
       "match_mapping_type" : "string",
       "mapping" : {
         "type" : "keyword",
         "index" : true
       }
     }
   }],
   "properties" : {
     "@timestamp": { "type": "date" },
     "@version": { "type": "keyword", "index": true },
     "message": {
       "type": "text",
       "analyzer": "ik_max_word"
     }
   }
 }
}
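
After Logstash has started with this pipeline, you can check that the template was installed (legacy template endpoint; replace the password placeholder):

curl -u elastic:***** "http://localhost:9200/_template/application-log?pretty"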

Spring Boot (version 2.5.15)

Add the Logstash dependency

<dependency>
    <groupId>net.logstash.logback</groupId>
    <artifactId>logstash-logback-encoder</artifactId>
    <version>6.6</version>
</dependency>

Modify logback.xml

<appender name="LOGSTASH" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
    <!-- Logstash server address -->
    <destination>logstash IP:5044</destination>
    <!-- Log output encoding -->
    <encoder charset="UTF-8"
             class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
        <providers>
            <timestamp>
                <timeZone>UTC</timeZone>
            </timestamp>
            <!-- Only the fields listed in this pattern are sent; the snake_case fields
                 (thread_name, level_value, caller_*, stack_trace, ...) handled in the
                 Logstash filter are the default field names of LogstashEncoder and would
                 need extra providers here to be emitted. -->
            <pattern>
                <pattern>
                    {
                    "env": "prod",
                    "service": "crm-log",
                    "timestamp": "%d{yyyy-MM-dd HH:mm:ss.SSS}",
                    "date": "%d{yyyy-MM-dd HH:mm:ss.SSS}",
                    "level": "%level",
                    "thread": "%thread",
                    "logger": "%logger{36}",
                    "msg": "%msg",
                    "exception": "%exception",
                    "app_name": "crm-log",
                    "profiles_active": "prod"
                    }
                </pattern>
            </pattern>
        </providers>
    </encoder>
</appender>

<root level="info">
    <appender-ref ref="LOGSTASH" />
</root>
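
Once the application is running and logging, documents should land in the index named after app_name (crm-log here); a quick check from the host (placeholder password):

curl -u elastic:***** "http://localhost:9200/crm-log/_search?size=1&pretty"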

Add the index pattern in Kibana

Management -> Stack Management -> Kibana -> Index Patterns -> create an index pattern for the corresponding index

Analytics -> Discover -> select the corresponding index pattern
