因项目ELK已经搭建完毕,此文记录接入日志信息的过程,并且通过注解切面增加日志信息,文章的项目是maven管理的springboot项目
项目中配置
找到pom文件添加如下配置
<!-- Encodes logback events as JSON and ships them to Logstash over TCP.
     NOTE(review): 5.3 is an old release — confirm it is compatible with the
     project's logback version before upgrading or keeping it. -->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
<version>5.3</version>
</dependency>
找到项目中的logback.xml配置文件,添加如下配置,此处10.138.1.80:9601是服务器logstash的端口和ip
<!-- TCP appender: sends JSON-encoded log events to the Logstash instance
     listening at 10.138.1.80:9601 (must match the Logstash tcp input below). -->
<appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<destination>10.138.1.80:9601</destination>
<encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder"/>
</appender>
<!-- Root logger: INFO and above goes to both the console and Logstash.
     NOTE(review): assumes an appender named "Console" is defined elsewhere
     in this logback.xml — confirm. -->
<root level="info">
<appender-ref ref="Console"/>
<appender-ref ref="logstash" />
</root>
创建切面
创建注解类
package com.hdfr.ddms.api.admin.util;
import java.lang.annotation.*;
// Applicable to methods only.
@Target(ElementType.METHOD)
// Retained at runtime so the ELK aspect can read it reflectively.
@Retention(RetentionPolicy.RUNTIME)
@Documented
// Marks a method whose invocations should be recorded to ELK by the aspect.
public @interface ElkAopAnnotation {
// Human-readable description of the operation (logged as-is).
String value() default "";
// Category of the operation; defaults to Null (unspecified).
ElkAopType type() default ElkAopType.Null;
}
枚举类
package com.hdfr.ddms.api.admin.util;
/**
 * Category of an ELK-logged operation. Each constant carries a display key
 * (mirroring the constant name) and a numeric code.
 */
public enum ElkAopType {

    /** Page-level access. */
    Page("Page", 1),
    /** Menu-level access. */
    Menu("Menu", 2),
    /** Button-level access. */
    Button("Button", 3),
    /** Default / unspecified. */
    Null("Null", 0);

    private final String name;
    private final Integer value;

    ElkAopType(String name, Integer value) {
        this.name = name;
        this.value = value;
    }

    /** Display key of this category. */
    public String getKey() {
        return name;
    }

    /** Numeric code of this category. */
    public Integer getValue() {
        return value;
    }
}
在方法之前调用注解
package com.hdfr.ddms.api.admin.util;
import com.hdfr.ddms.core.security.JwtUser;
import com.hdfr.ddms.core.utils.SecurityUtils;
import lombok.extern.slf4j.Slf4j;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.hdfr.ddms.service.JwtUserDetailsService;
@Component
@Aspect
@Slf4j
public class ElkAop {

    @Autowired
    private JwtUserDetailsService userDetailsService;

    /** Pointcut: any method annotated with {@code @ElkAopAnnotation}. */
    @Pointcut("@annotation(com.hdfr.ddms.api.admin.util.ElkAopAnnotation)")
    private void testAopCheck() {
    }

    /**
     * Before advice: records who invoked the annotated method, plus the
     * annotation's category and description, so the event is shipped to ELK
     * via the logstash appender.
     *
     * @param testAopAnnotation the annotation instance on the intercepted method
     */
    @Before("testAopCheck() && @annotation(testAopAnnotation)")
    public void testAopCheckFirst(ElkAopAnnotation testAopAnnotation) {
        // Resolve the current user from the security context.
        JwtUser jwtUser = (JwtUser) userDetailsService.loadUserByUsername(SecurityUtils.getUsername());
        // BUG FIX: the format string has three {} placeholders but only two
        // arguments were supplied, and jwtUser was fetched yet never logged.
        // The user was clearly meant to fill the first placeholder.
        // NOTE(review): assumes JwtUser exposes getUsername() (UserDetails) — confirm.
        log.info("{}:ELK日志----------------{}:{}",
                jwtUser.getUsername(),
                testAopAnnotation.type(),
                testAopAnnotation.value());
    }
}
随便找一个接口测试一下
@GetMapping
@Log("查询MetadataInfo")
@ApiOperation("查询MetadataInfo")
@PreAuthorize("@el.check('metadataInfo:list')")
// Used to exercise the ELK logging aspect.
@ElkAopAnnotation(value = "分页查询元数据列表", type = ElkAopType.Page)
public ResponseEntity getMetadataInfos(MetadataInfoCriteria criteria, Pageable pageable){
    IPage<MetadataInfo> page = QueryHelper.toPage(pageable);
    // The joined tables share a "deleted" column, which would make the
    // auto-built query ambiguous. Pull the value out, build the query
    // without it, then re-apply it manually with an explicit table alias.
    Boolean deleted = criteria.getDeleted();
    if (deleted != null) {
        criteria.setDeleted(null); // keep buildQuery from emitting the ambiguous column
    }
    QueryWrapper<MetadataInfo> query = QueryHelper.buildQuery(criteria);
    if (deleted != null) {
        // FIX: dropped the redundant second criteria.setDeleted(null) — the
        // criterion was already nulled above and never reset in between.
        query.eq("mdr.deleted", deleted); // alias-qualified to avoid ambiguity
    }
    return new ResponseEntity<>(metadataInfoService.queryPage(page, query), HttpStatus.OK);
}
服务器logstash设置
进入logstash配置文件(注意:以下修改的是logstash管道配置,不是kibana)
/home/centos/soft/elk/logstash-7.17.7/config
修改配置
# input 设置(logstash配置中的注释使用 #,不是 //)
# TCP input matching the LogstashTcpSocketAppender destination (10.138.1.80:9601).
input {
tcp {
mode => "server"
host => "10.138.1.80"
port => 9601
# json_lines matches the newline-delimited JSON emitted by LogstashEncoder.
codec => json_lines
# Tag events so the output section can route them.
type => "manage"
}
}
# output 设置
# Route "manage" events to the Elasticsearch cluster, one index per day.
output {
if[type] == "manage"{
elasticsearch {
hosts => ["10.138.1.80:9200","10.138.1.81:9200","10.138.1.33:9200"]
index => "logstash-%{+YYYY.MM.dd}"
user => "elastic"
# NOTE(review): plaintext credentials in the pipeline file — consider the
# Logstash keystore instead of committing the password.
password => "elasticsearch@Kay123"
}
}
}
开启对应端口(9601)的防火墙规则,然后重启logstash使配置生效