Setting up ELK log collection with Docker and querying the logs

This article walks through installing and configuring Elasticsearch, Logstash, and Kibana with Docker, integrating Logstash into a Spring Boot application for log shipping, and querying and visualizing the logs through Kibana.

1. Installing Elasticsearch with Docker

docker pull elasticsearch:7.6.2

mkdir -p /mydata/elasticsearch/plugins /mydata/elasticsearch/data

chmod 777 /mydata/elasticsearch/data/

docker run -p 9200:9200 -p 9300:9300 --name elasticsearch \
-e "discovery.type=single-node" \
-e "cluster.name=elasticsearch" \
-v /mydata/elasticsearch/plugins:/usr/share/elasticsearch/plugins \
-v /mydata/elasticsearch/data:/usr/share/elasticsearch/data \
-d elasticsearch:7.6.2
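
Once the container starts, you can confirm Elasticsearch is reachable. This assumes the command runs on the Docker host itself; adjust the address otherwise:

curl http://localhost:9200

A JSON response containing the node name, cluster name, and version confirms the node is up.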

2. Installing Logstash with Docker

docker pull logstash:7.6.2

mkdir /mydata/logstash

cd /mydata/logstash

vim logstash.conf

# Place the following configuration in logstash.conf. It opens four TCP inputs
# (ports 4560-4563), one per log type, and writes each event to a daily index
# named logback-<type>-<date>.
input {
  tcp {
    mode => "server"
    host => "0.0.0.0"
    port => 4560
    codec => json_lines
    type => "debug"
  }
  tcp {
    mode => "server"
    host => "0.0.0.0"
    port => 4561
    codec => json_lines
    type => "error"
  }
  tcp {
    mode => "server"
    host => "0.0.0.0"
    port => 4562
    codec => json_lines
    type => "business"
  }
  tcp {
    mode => "server"
    host => "0.0.0.0"
    port => 4563
    codec => json_lines
    type => "record"
  }
}
filter{
  if [type] == "record" {
    mutate {
      remove_field => "port"
      remove_field => "host"
      remove_field => "@version"
    }
    json {
      source => "message"
      remove_field => ["message"]
    }
  }
}
output {
  elasticsearch {
    hosts => "es:9200"
    index => "logback-%{type}-%{+YYYY.MM.dd}"
  }
}

docker run --name logstash -p 4560:4560 -p 4561:4561 -p 4562:4562 -p 4563:4563 \
--link elasticsearch:es \
-v /mydata/logstash/logstash.conf:/usr/share/logstash/pipeline/logstash.conf \
-d logstash:7.6.2
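
The configuration relies on the json_lines codec. Recent official Logstash images bundle it already, but if the startup logs complain that the codec cannot be found, it can be installed inside the container:

docker exec -it logstash /bin/bash
logstash-plugin install logstash-codec-json_lines
exit
docker restart logstash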

3. Installing Kibana with Docker

docker pull kibana:7.6.2

docker run --name kibana -p 5601:5601 \
--link elasticsearch:es \
-e "elasticsearch.hosts=http://es:9200" \
-d kibana:7.6.2
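
At this point the pipeline can be smoke-tested end to end by pushing one JSON line into a Logstash input and checking that an index appears. This sketch assumes netcat (nc) is installed on the host; some nc variants need an extra flag to exit after EOF:

echo '{"message":"pipeline smoke test"}' | nc localhost 4560
curl "http://localhost:9200/_cat/indices?v"

If everything is wired correctly, the index list includes an entry named logback-debug-<today's date>, matching the type of the 4560 input.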

4. Integrating Logstash into Spring Boot

Add the Maven dependency to pom.xml:

<!-- Logstash integration -->
<dependency>
    <groupId>net.logstash.logback</groupId>
    <artifactId>logstash-logback-encoder</artifactId>
    <version>7.3</version>
</dependency>

Create a logback-spring.xml file under resources with the configuration below, replacing ip with the address of your deployed Logstash service. Note that the destination port 4561 matches the TCP input tagged type => "error", so these logs end up in the logback-error-* indices:

<?xml version="1.0" encoding="UTF-8"?>
<!-- Logback configuration: writes logs to the console and ships them to Logstash as JSON -->
<configuration>
    <include resource="org/springframework/boot/logging/logback/defaults.xml" />

    <springProperty scope="context" name="springAppName"
                    source="spring.application.name" />

    <!-- Log file location within the project -->
    <property name="LOG_FILE" value="${BUILD_FOLDER:-build}/${springAppName}" />

    <!-- Console log output pattern -->
    <property name="CONSOLE_LOG_PATTERN"
              value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}" />

    <!-- Console output -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>INFO</level>
        </filter>
        <!-- Log output encoding -->
        <encoder>
            <pattern>${CONSOLE_LOG_PATTERN}</pattern>
            <charset>utf8</charset>
        </encoder>
    </appender>

    <!-- Appender that ships JSON-formatted logs to Logstash -->
    <appender name="logstash"
              class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <destination>ip:4561</destination>
        <!-- Log output encoding -->
        <encoder
                class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
            <providers>
                <timestamp>
                    <timeZone>UTC</timeZone>
                </timestamp>
                <pattern>
                    <pattern>
                        {
                        "severity": "%level",
                        "service": "${springAppName:-}",
                        "trace": "%X{X-B3-TraceId:-}",
                        "span": "%X{X-B3-SpanId:-}",
                        "exportable": "%X{X-Span-Export:-}",
                        "pid": "${PID:-}",
                        "thread": "%thread",
                        "class": "%logger{40}",
                        "rest": "%message"
                        }
                    </pattern>
                </pattern>
            </providers>
        </encoder>
    </appender>

    <!-- Root logging level -->
    <root level="INFO">
        <appender-ref ref="console" />
        <appender-ref ref="logstash" />
    </root>

</configuration>
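
For reference, each log event reaches Logstash as one JSON line shaped roughly like the following; all values here are illustrative, and the @timestamp field is contributed by the timestamp provider:

{"@timestamp":"2024-01-01T12:00:00.000Z","severity":"ERROR","service":"demo-app","trace":"","span":"","exportable":"","pid":"1234","thread":"http-nio-8080-exec-1","class":"c.e.demo.TestLogController","rest":"Test error log output"}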

5. Querying the Recorded Logs

Start the project and add a simple controller that writes a log entry:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class TestLogController {

    /**
     * Logger for this controller
     */
    private static final Logger log = LoggerFactory.getLogger(TestLogController.class);

    /**
     * Endpoint that emits a test log entry
     */
    @GetMapping("/testLog")
    public String testLog() {
        log.error("Test error log output");
        return "success";
    }
}
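
Call the endpoint once so there is something to query (assuming the application listens on the default port 8080):

curl http://localhost:8080/testLog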

Query method 1: via Kibana

Access URL: http://ip:5601/

Before logs show up in Discover, create an index pattern (for example logback-*) under Management > Index Patterns. You can then browse the recorded log entries in Discover.

Query method 2: via a REST endpoint

Steps: add the dependencies, then an entity class, a service class, and a controller class, and finally call the endpoint. (A note on client configuration follows the dependencies below.)

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
        </dependency>
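
On Spring Boot 2.x, spring-boot-starter-data-elasticsearch can auto-configure a RestHighLevelClient when the spring.elasticsearch.rest.uris property points at your cluster. If that does not apply to your setup, the following is a minimal sketch of an explicit bean; the host name "ip" is a placeholder for your Elasticsearch address:

import org.apache.http.HttpHost;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class ElasticsearchConfig {

    @Bean(destroyMethod = "close")
    public RestHighLevelClient restHighLevelClient() {
        // Placeholder host: point the client at the Elasticsearch HTTP port
        return new RestHighLevelClient(
                RestClient.builder(new HttpHost("ip", 9200, "http")));
    }
}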

import lombok.Data;

/**
 * Entity representing one log entry returned by the search API
 */
@Data
public class LogEntry {
    private String date;
    private String message;
    private String type;
    private String service;

    public LogEntry(String date, String message,String type,String service) {
        this.date = date;
        this.message = message;
        this.type = type;
        this.service = service;
    }
}

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

@Service
public class ElasticsearchService {

    private final RestHighLevelClient client;

    @Autowired
    public ElasticsearchService(RestHighLevelClient client) {
        this.client = client;
    }

    public List<LogEntry> getAllLogs() {
        List<LogEntry> logEntries = new ArrayList<>();

        try {
            // Build the search request: match-all on the logback-error-* indices, newest first
            // (Elasticsearch returns 10 hits by default; call searchSourceBuilder.size(n) for more)
            SearchRequest searchRequest = new SearchRequest("logback-error-*");
            SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
            searchSourceBuilder.query(QueryBuilders.matchAllQuery());
            searchSourceBuilder.sort("@timestamp", SortOrder.DESC);
            searchRequest.source(searchSourceBuilder);

            // Execute the search request
            SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);

            // Map each hit's source fields into a LogEntry
            for (SearchHit hit : searchResponse.getHits().getHits()) {
                Map<String, Object> sourceAsMap = hit.getSourceAsMap();
                String date = (String) sourceAsMap.get("@timestamp");
                String type = (String) sourceAsMap.get("type");
                String rest = (String) sourceAsMap.get("rest");
                String service = (String) sourceAsMap.get("service");
                logEntries.add(new LogEntry(date, rest,type,service));
            }
        } catch (IOException e) {
            e.printStackTrace();
        }

        return logEntries;
    }
}

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.List;

@RestController
@RequestMapping("/api")
public class ElasticsearchController {

    private final ElasticsearchService elasticsearchService;

    @Autowired
    public ElasticsearchController(ElasticsearchService elasticsearchService) {
        this.elasticsearchService = elasticsearchService;
    }

    @GetMapping("/search")
    public List<LogEntry> searchAllData() {
        return elasticsearchService.getAllLogs();
    }
}
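
Calling the endpoint returns the matching log entries as JSON (again assuming the default port 8080):

curl http://localhost:8080/api/search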

The response contains the log records queried from Elasticsearch.
