Unified log format

springboot


Logback.xml

Before integrating, make sure the following keys are present in the application configuration (an example properties file is sketched after this list):

spring.application.name: the project name

<project name>.env: deployment environment

<project name>.version: application version

<project name>.log.level: root log level

<project name>.log.path: log file output directory

graylog.server.ip: IP address of the Graylog server; in the SIT environment this is 180.166.139.160
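For illustration only, here is a minimal application.properties matching the Spring Boot logback.xml below, which uses "spritz" as the project-name prefix. The env, version, and path values are placeholder assumptions; the Graylog IP is the SIT address listed above.

# Illustrative sketch; the "spritz" prefix matches the sample logback.xml below
spring.application.name=spritz
# env and version are placeholder values, replace with your own
spritz.env=sit
spritz.version=1.0.0
spritz.log.level=INFO
spritz.log.path=/logs
# SIT Graylog address
graylog.server.ip=180.166.139.160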

<?xml version="1.0" encoding="UTF-8"?>

<configuration scan="true" scanPeriod="60 seconds" debug="true">

    <!-- Replace these keys with the ones defined in your own project's configuration -->

    <springProperty scope="context" name="name" source="spring.application.name"/>

    <springProperty scope="context" name="env" source="spritz.env"/>

    <springProperty scope="context" name="version" source="spritz.version"/>

    <springProperty scope="context" name="logLevel" source="spritz.log.level"/>

    <springProperty scope="context" name="logHome" source="spritz.log.path"/>

    <springProperty scope="context" name="serverIp" source="graylog.server.ip" defaultValue="1.1.1.1"/>

    <define name="ipAddress" class="com.haocheemai.wuling.tools.log.IPAddressProperty"/>

    <!-- ch.qos.logback.core.ConsoleAppender  -->

    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">

        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">

            <Pattern>%-5level %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] [%X{traceId}] [%logger{50}:%line] - %msg%n</Pattern>

        </encoder>

    </appender>

    <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">

        <file>${logHome}/spritz.log</file>

        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">

            <!-- Name pattern of the rolled log files -->

            <FileNamePattern>${logHome}/spritz.%d{yyyy-MM-dd}.%i.log</FileNamePattern>

            <!-- Number of days of log files to keep -->

            <MaxHistory>90</MaxHistory>

            <maxFileSize>100MB</maxFileSize>

            <totalSizeCap>10GB</totalSizeCap>

        </rollingPolicy>

        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">

            <Pattern>%-5level %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] [%X{traceId}] [%logger{50}:%line] - %msg%n</Pattern>

        </encoder>

    </appender>

    <appender name="GELF" class="de.siegmar.logbackgelf.GelfUdpAppender">

        <graylogHost>${serverIp}</graylogHost>

        <graylogPort>12253</graylogPort>

        <maxChunkSize>508</maxChunkSize>

        <useCompression>true</useCompression>

        <!--<messageIdSupplier class="de.siegmar.logbackgelf.MessageIdSupplier"/>-->

        <encoder class="de.siegmar.logbackgelf.GelfEncoder">

            <originHost>${ipAddress}</originHost>

            <includeRawMessage>false</includeRawMessage>

            <includeMarker>true</includeMarker>

            <includeMdcData>true</includeMdcData>

            <includeCallerData>false</includeCallerData>

            <includeRootCauseData>false</includeRootCauseData>

            <includeLevelName>false</includeLevelName>

            <shortPatternLayout class="ch.qos.logback.classic.PatternLayout">

                <pattern>%-5level %d{yyyy-MM-dd HH:mm:ss.SSS} [%X{traceId}] %msg%n</pattern>

            </shortPatternLayout>

            <fullPatternLayout class="ch.qos.logback.classic.PatternLayout">

                <pattern>%-5level %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] [%X{traceId}] [%logger{50}:%line] - %msg%n

                </pattern>

            </fullPatternLayout>

            <!--<numbersAsString>false</numbersAsString>-->

            <staticField>app_name:${name}</staticField>

            <staticField>app_version:${version}</staticField>

            <staticField>app_env:${env}</staticField>

            <staticField>os_arch:${os.arch}</staticField>

            <staticField>os_name:${os.name}</staticField>

            <staticField>os_version:${os.version}</staticField>

            <staticField>java_version:${java.version}</staticField>

        </encoder>

    </appender>

    <root level="${logLevel}">

        <appender-ref ref="STDOUT"/>

        <appender-ref ref="GELF"/>

        <appender-ref ref="FILE"/>

    </root>

</configuration>
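The GelfUdpAppender and GelfEncoder classes referenced above come from the third-party logback-gelf library, so it must be on the classpath. A typical Maven declaration is sketched below; the version number is only an example, use the release that matches your logback version:

<dependency>
    <groupId>de.siegmar</groupId>
    <artifactId>logback-gelf</artifactId>
    <!-- illustrative version; pick the release matching your logback version -->
    <version>3.0.0</version>
</dependency>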

springmvc

logback

<?xml version="1.0" encoding="UTF-8"?>

<configuration scan="true" scanPeriod="60 seconds" debug="false">

    <contextListener class="cn.anji.leasing.gateway.apollo.LoggerStartupListener" />

    <property scope="context" name="name"  value="${espresso.application.name}"/>

    <property scope="context" name="env" value="${espresso.env}"/>

    <property scope="context" name="version" value="${espresso.version}"/>

    <property scope="context" name="logHome" value="${espresso.log.path}" />

    <property scope="context" name="logLevel" value="${espresso.log.level}"/>

    <property scope="context" name="serverIp" value="${graylog.server.ip}" defaultValue="1.1.1.1"/>

    <define name="ipAddress"  class="com.haocheemai.wuling.tools.log.IPAddressProperty"/>

     

    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">

        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">

            <Pattern>%-5level %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] [%X{traceId}] [%logger{50}:%line] - %msg%n</Pattern>

        </encoder>

    </appender>

    <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">

        <file>${logHome}/espresso.log</file>

        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">

            <!-- Name pattern of the rolled log files -->

            <FileNamePattern>${logHome}/espresso.%d{yyyy-MM-dd}.%i.log</FileNamePattern>

            <!-- Number of days of log files to keep -->

            <MaxHistory>90</MaxHistory>

            <maxFileSize>100MB</maxFileSize>

            <totalSizeCap>20GB</totalSizeCap>

        </rollingPolicy>

        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">

            <Pattern>%-5level %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] [%X{traceId}] [%logger{50}:%line] - %msg%n</Pattern>

        </encoder>

    </appender>

    <appender name="GELF" class="de.siegmar.logbackgelf.GelfUdpAppender">

        <graylogHost>${serverIp}</graylogHost>

        <graylogPort>12253</graylogPort>

        <maxChunkSize>508</maxChunkSize>

        <useCompression>true</useCompression>

        <encoder class="de.siegmar.logbackgelf.GelfEncoder">

            <originHost>${ipAddress}</originHost>

            <includeRawMessage>false</includeRawMessage>

            <includeMarker>true</includeMarker>

            <includeMdcData>true</includeMdcData>

            <includeCallerData>false</includeCallerData>

            <includeRootCauseData>false</includeRootCauseData>

            <includeLevelName>false</includeLevelName>

            <shortPatternLayout class="ch.qos.logback.classic.PatternLayout">

                <pattern>%-5level %d{yyyy-MM-dd HH:mm:ss.SSS} [%X{traceId}] %msg%n</pattern>

            </shortPatternLayout>

            <fullPatternLayout class="ch.qos.logback.classic.PatternLayout">

                <pattern>%-5level %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] [%X{traceId}] [%logger{50}:%line] - %msg%n

                </pattern>

            </fullPatternLayout>

            <staticField>app_name:${name}</staticField>

            <staticField>app_version:${version}</staticField>

            <staticField>app_env:${env}</staticField>

            <staticField>os_arch:${os.arch}</staticField>

            <staticField>os_name:${os.name}</staticField>

            <staticField>os_version:${os.version}</staticField>

            <staticField>java_version:${java.version}</staticField>

        </encoder>

    </appender>

    <root level="${logLevel}">

        <appender-ref ref="STDOUT"/>

        <appender-ref ref="FILE"/>

        <appender-ref ref="GELF"/>

    </root>

</configuration>

apollo+logback

LoggerStartupListener.java

The listener below is registered through the <contextListener> element shown above. When logback starts, it reads the logging-related keys from Apollo and copies them into the LoggerContext, so that the ${espresso.*} placeholders in the logback configuration can be resolved.

package cn.anji.leasing.gateway.apollo;

import java.net.URL;

import org.slf4j.LoggerFactory;

import com.ctrip.framework.apollo.Config;
import com.ctrip.framework.apollo.ConfigService;
import com.ctrip.framework.apollo.model.ConfigChange;

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.spi.LoggerContextListener;
import ch.qos.logback.classic.util.ContextInitializer;
import ch.qos.logback.core.Context;
import ch.qos.logback.core.joran.spi.JoranException;
import ch.qos.logback.core.spi.ContextAwareBase;
import ch.qos.logback.core.spi.LifeCycle;

public class LoggerStartupListener extends ContextAwareBase implements LoggerContextListener, LifeCycle {

    private final org.slf4j.Logger log = LoggerFactory.getLogger(LoggerStartupListener.class);

    private static boolean started = false;

    private static final String APP_NAME = "espresso.application.name";

    private static final String LOG_ENV = "espresso.env";

    private static final String LOG_VERSION = "espresso.version";

    private static final String LOG_PATH = "espresso.log.path";

    private static final String LOG_LEVEL = "espresso.log.level";

    private static final String GRAY_LOG_IP = "graylog.server.ip";

    @Override

    public void start() {

        if (started) {

            return;

        }

        // Read all logging-related configuration from Apollo

        Config config = ConfigService.getAppConfig();

        String logPath = config.getProperty(LOG_PATH, "/logs");

        String logLevel = config.getProperty(LOG_LEVEL, "INFO");

        String grayLogIp = config.getProperty(GRAY_LOG_IP, "1.1.1.1");

        String appName = config.getProperty(APP_NAME, "");

        String env = config.getProperty(LOG_ENV, "");

        String version = config.getProperty(LOG_VERSION, "");

        Context context = getContext();

        // A ConfigChangeListener can update the LoggerContext properties on hot reload, but the logging configuration itself does not pick up the change

        /*config.addChangeListener(new ConfigChangeListener() {

            @Override

            public void onChange(ConfigChangeEvent configChangeEvent) {

                for (String key : configChangeEvent.changedKeys()) {

                    if (LOG_PATH.equals(key) || LOG_LEVEL.equals(key) || GRAY_LOG_IP.equals(key) || APP_NAME.equals(key)){

                        ConfigChange change = configChangeEvent.getChange(key);

                        reloadDefaultConfiguration(key, change);

                    }

                }

            }

        });*/

        context.putProperty(LOG_PATH, logPath);

        context.putProperty(LOG_LEVEL, logLevel);

        context.putProperty(GRAY_LOG_IP, grayLogIp);

        context.putProperty(APP_NAME, appName);

        context.putProperty(LOG_ENV, env);

        context.putProperty(LOG_VERSION, version);

        started = true;

    }

    @Override

    public void stop() {

    }

    @Override

    public boolean isStarted() {

        return started;

    }

    @Override

    public boolean isResetResistant() {

        return true;

    }

    @Override

    public void onStart(LoggerContext context) {

    }

    @Override

    public void onReset(LoggerContext context) {

    }

    @Override

    public void onStop(LoggerContext context) {

    }

    @Override

    public void onLevelChange(Logger logger, Level level) {

    }

    private void reloadDefaultConfiguration(String key, ConfigChange change)  {

        LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();

        ContextInitializer ci = new ContextInitializer(loggerContext);

        URL url = ci.findURLOfDefaultConfigurationFile(true);

        loggerContext.reset();

        loggerContext.putProperty(key, change.getNewValue());

        log.info("Changed {}={}", key, change.getNewValue());

        try {

            ci.configureByResource(url);

        } catch (JoranException e) {

            e.printStackTrace();

        }

    }

}

dockerfile

The image has to create the log directory configured above (/logs by default) and make it writable by the runtime user (jetty in this example), otherwise the FILE appender cannot write:

USER root

RUN mkdir -p /logs

RUN chown -R jetty:jetty /logs

RUN mkdir -p /opt/data

RUN chown -R jetty:jetty /opt/data
