Shipping log messages to ELK with Logback and Kafka

Flow diagram

Reference: https://blog.csdn.net/zhangruhong168/article/details/76973212

Implementation steps

1. Add the net.logstash.logback (logstash-logback-encoder) and logback-kafka-appender dependencies

 

<!-- logstash / logback integration -->
<dependency>
    <groupId>net.logstash.logback</groupId>
    <artifactId>logstash-logback-encoder</artifactId>
    <version>4.11</version>
    <exclusions>
        <exclusion>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-core</artifactId>
        </exclusion>
    </exclusions>
</dependency>

<!-- logback / kafka integration -->
<dependency>
    <groupId>com.github.danielwegener</groupId>
    <artifactId>logback-kafka-appender</artifactId>
    <version>0.1.0</version>
    <scope>runtime</scope>
</dependency>

 

2. Add the logback-kafka-appender configuration to logback.xml

 

 

<!-- Logs are written to Kafka and then picked up by Logstash -->
<appender name="kafkaAppender" class="com.github.danielwegener.logback.kafka.KafkaAppender">
    <encoder class="com.github.danielwegener.logback.kafka.encoding.PatternLayoutKafkaMessageEncoder">
        <layout class="net.logstash.logback.layout.LogstashLayout">
            <!-- When enabled, the output includes logback context information such as the hostname -->
            <includeContext>true</includeContext>
            <!-- Whether to include caller data (where the log statement originated) -->
            <includeCallerData>true</includeCallerData>
            <fieldNames class="net.logstash.logback.fieldnames.ShortenedFieldNames"/>
        </layout>
        <charset>UTF-8</charset>
    </encoder>
    <!-- The Kafka topic; it must match the topic used on the Kafka/Logstash side, otherwise the messages will not be consumed -->
    <topic>behaviorlog</topic>
    <!-- Partition keying strategy -->
    <keyingStrategy class="com.github.danielwegener.logback.kafka.keying.RoundRobinKeyingStrategy"/>
    <!-- Delivery strategy: logback-kafka-appender provides two, asynchronous (AsynchronousDeliveryStrategy) and blocking (BlockingDeliveryStrategy) -->
    <deliveryStrategy class="com.github.danielwegener.logback.kafka.delivery.AsynchronousDeliveryStrategy"/>
    <!-- bootstrap.servers points to the Kafka brokers -->
    <producerConfig>bootstrap.servers=192.168.4.118:9092,192.168.4.118:9093,192.168.4.118:9094</producerConfig>
</appender>

<appender name="kafkaAppenderAsync" class="ch.qos.logback.classic.AsyncAppender">
    <appender-ref ref="kafkaAppender"/>
</appender>

<!-- Route behavior logs to Kafka -->
<logger name="KafkaPipeline" level="INFO">
    <appender-ref ref="kafkaAppenderAsync"/>
</logger>
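With this in place, anything written through the named logger "KafkaPipeline" ends up on the behaviorlog topic. A minimal sketch of logging against that logger directly, useful as a smoke test before wiring up the aspect in step 6 (the class name and JSON payload below are illustrative only, not part of the project):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class KafkaPipelineSmokeTest {

    // Must match the <logger name="KafkaPipeline"> configured in logback.xml
    private static final Logger behaviorLogger = LoggerFactory.getLogger("KafkaPipeline");

    public static void main(String[] args) {
        // Each call produces one record on the "behaviorlog" Kafka topic
        behaviorLogger.info("{\"event\":\"smoke-test\",\"user\":\"demo\"}");
    }
}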

 

3. Add the following to web.xml

        

<context-param>
    <param-name>logbackConfigLocation</param-name>
    <param-value>classpath:/logback.xml</param-value>
</context-param>

 

4. Deploy Kafka

 

Start Kafka (one broker per properties file):

nohup ./bin/kafka-server-start.sh config/server.properties &
nohup ./bin/kafka-server-start.sh config/server-1.properties > nohup-1.out &
nohup ./bin/kafka-server-start.sh config/server-2.properties > nohup-2.out &
nohup ./bin/kafka-server-start.sh config/server-3.properties > nohup-3.out &

 

Create the topic (the name must match the <topic> configured in logback.xml):

./bin/kafka-topics.sh --create --zookeeper 192.168.4.118:2281 --replication-factor 3 --partitions 3 --topic behaviorlog

 

Produce a test message:

./bin/kafka-console-producer.sh --broker-list 192.168.4.118:9092 --topic behaviorlog

 

Consume messages (to verify the topic is receiving data):

./bin/kafka-console-consumer.sh --bootstrap-server 192.168.4.118:9092 --topic behaviorlog --from-beginning

 

5. Deploy Logstash

 

Create logstash-es.conf under the conf directory of the unpacked Logstash distribution (contents below), then start Logstash:

nohup ./bin/logstash -f conf/logstash-es.conf &

 

input {
    kafka {
        bootstrap_servers => ["192.168.4.118:9092"]
        auto_offset_reset => "latest"
        consumer_threads => 5
        decorate_events => true
        topics => ["behaviorlog"]
        type => "bhy"
    }
}

output {
    elasticsearch {
        index => "behaviorlog-%{+YYYY.MM.dd}"
        hosts => ["192.168.4.118:9200"]
    }
}

6. Define the aspect

package com.tzg.app.aop.advice;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.tzg.app.aop.OperationType;
import com.tzg.app.aop.annotation.UserBehaviorLog;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.ArrayUtils;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.AfterReturning;
import org.aspectj.lang.annotation.AfterThrowing;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

@Component
@Aspect
public class UserBehaviorLogAdvice {

  /** Behavior logger; the name must match the "KafkaPipeline" logger configured in logback.xml. */
  private static final Logger behaviorLogger = LoggerFactory.getLogger("KafkaPipeline");

  /** Regular logger for this class. */
  protected static Logger logger = LoggerFactory.getLogger(UserBehaviorLogAdvice.class);

  @Before("@annotation(com.tzg.app.aop.annotation.UserBehaviorLog)")
  public void addBeforeLogger(JoinPoint joinPoint) {
//    String clazzName = joinPoint.getTarget().getClass().getName();
//    String methodName = joinPoint.getSignature().getName();
//    Object[] objects = joinPoint.getArgs();
//    behaviorLogger.info("Entering clazzName=" + clazzName + ",method=" + methodName + ",args=" + objects);
  }

 

  @AfterReturning(returning = "returnVal", pointcut = "@annotation(com.tzg.app.aop.annotation.UserBehaviorLog)")
  public void addAfterReturningLogger(JoinPoint joinPoint, Object returnVal) {
    try {
      Object[] args = joinPoint.getArgs();
      JSONObject jsonInfo = this.getMethodInfo(joinPoint);
      JSONObject json = new JSONObject();
      // Filter out the request and response objects before serialization
      List<Object> logArgs = this.streamOf(args)
          .filter(arg -> (!(arg instanceof HttpServletRequest) && !(arg instanceof HttpServletResponse)))
          .collect(Collectors.toList());
      json.put("args", JSONArray.parseArray(JSON.toJSONString(logArgs)));
      json.put("returnVal", returnVal);
      json.putAll(jsonInfo);
      behaviorLogger.info(json.toJSONString());
    } catch (Exception e) {
      logger.error("Failed to process the UserBehaviorLog annotation", e);
    }
  }

  @AfterThrowing(pointcut = "@annotation(com.tzg.app.aop.annotation.UserBehaviorLog)", throwing = "ex")
  public void addAfterThrowingLogger(JoinPoint joinPoint, Exception ex) {
    try {
      Object[] args = joinPoint.getArgs();
      JSONObject json = new JSONObject();
      // Filter out the request and response objects before serialization
      List<Object> logArgs = this.streamOf(args)
          .filter(arg -> (!(arg instanceof HttpServletRequest) && !(arg instanceof HttpServletResponse)))
          .collect(Collectors.toList());
      json.put("args", JSONArray.parseArray(JSON.toJSONString(logArgs)));
      json.put("exception", ex.getMessage());
      JSONObject jsonInfo = this.getMethodInfo(joinPoint);
      json.putAll(jsonInfo);
      behaviorLogger.info(json.toJSONString());
    } catch (Exception e) {
      logger.error("Failed to process the UserBehaviorLog annotation", e);
    }
  }

  public static <T> Stream<T> streamOf(T[] array) {
    return ArrayUtils.isEmpty(array) ? Stream.empty() : Arrays.asList(array).stream();
  }

  /** Collects the class name, method name, and the annotation's description/type into a JSON object. */
  public JSONObject getMethodInfo(JoinPoint joinPoint) throws Exception {
    String targetName = joinPoint.getTarget().getClass().getName();
    String methodName = joinPoint.getSignature().getName();
    Object[] arguments = joinPoint.getArgs();
    Class targetClass = Class.forName(targetName);
    Method[] methods = targetClass.getMethods();
    JSONObject json = new JSONObject();
    json.put("clazzName", targetName);
    json.put("methodName", methodName);
    for (Method method : methods) {
      if (method.getName().equals(methodName)) {
        Class[] clazzs = method.getParameterTypes();
        if (clazzs.length == arguments.length) {
          // Guard against overloads with the same name and arity that lack the annotation
          UserBehaviorLog annotation = method.getAnnotation(UserBehaviorLog.class);
          if (annotation == null) {
            continue;
          }
          json.put("description", annotation.description());
          json.put("type", annotation.type());
          break;
        }
      }
    }
    return json;
  }

  /** Invokes the matched method reflectively on the target to obtain its return value (not called by the advice methods above). */
  public Object getReturnVal(JoinPoint joinPoint) throws Exception {
    String targetName = joinPoint.getTarget().getClass().getName();
    String methodName = joinPoint.getSignature().getName();
    Object[] arguments = joinPoint.getArgs();
    Class targetClass = Class.forName(targetName);
    Method[] methods = targetClass.getMethods();
    Object returnVal = null;
    for (Method method : methods) {
      if (method.getName().equals(methodName)) {
        Class[] clazzs = method.getParameterTypes();
        if (clazzs.length == arguments.length) {
          // invoke() needs the target instance first, then the argument array
          returnVal = method.invoke(joinPoint.getTarget(), arguments);
          break;
        }
      }
    }
    return returnVal;
  }
}
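Note that the @Aspect/@Component advice only fires if Spring has AspectJ auto-proxying enabled and scans the aspect's package. How this is wired depends on the project; below is a minimal Java-config sketch, assuming Java configuration is an option (the class name and base package are placeholders). An XML-configured project would instead add <aop:aspectj-autoproxy/> and a matching <context:component-scan/> to its Spring configuration.

package com.tzg.app.config; // hypothetical package, adjust to the project layout

import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.EnableAspectJAutoProxy;

// Enables AspectJ-style auto-proxying and makes sure the aspect package is scanned
@Configuration
@EnableAspectJAutoProxy
@ComponentScan("com.tzg.app.aop")
public class AopConfig {
}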

 

7. Define the annotation and enum used by the aspect

// Define the annotation
package com.tzg.app.aop.annotation;

import com.tzg.app.aop.OperationType;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface UserBehaviorLog {

  /**
   * Description of the operation
   */
  String description() default "";

  /**
   * Operation type
   */
  OperationType type();
}

 

// Define the enum
package com.tzg.app.aop;

/**
 * Operation type
 */
public enum OperationType {

  ADD("add"), DEL("del"), UPDATE("update"), QUERY("query");

  private String type;

  private OperationType(String type) {
    this.type = type;
  }

  public String getType() {
    return type;
  }
}

 

8. Use the annotation in code

    @UserBehaviorLog(type = OperationType.QUERY, description = "测试测试")
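A fuller sketch of where the annotation sits, for example on a Spring MVC handler method; the controller class, mapping, and return value below are made-up placeholders rather than project code:

package com.tzg.app.web; // hypothetical package for illustration

import com.tzg.app.aop.OperationType;
import com.tzg.app.aop.annotation.UserBehaviorLog;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;

@Controller
public class UserQueryController {

    // The aspect intercepts this call and ships its args/return value to the behaviorlog topic
    @UserBehaviorLog(type = OperationType.QUERY, description = "query user by id")
    @RequestMapping("/user/detail")
    @ResponseBody
    public String queryUser(@RequestParam("id") Long id) {
        // Placeholder business logic
        return "user-" + id;
    }
}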
