日志配置:
xml
logback-spring.xml
<include resource="org/springframework/boot/logging/logback/defaults.xml" />
<include resource="org/springframework/boot/logging/logback/console-appender.xml" />
<!-- Rolling file appender: active log at C:/log/stdout.log, rolled over daily -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <file>C:/log/stdout.log</file>
    <encoder>
        <pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
    </encoder>
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
        <!-- Appending .gz to the pattern enables compression of rolled logs, greatly reducing disk usage -->
        <!--<fileNamePattern>/home/hfw-client/hfw_log/stdout.log.%d{yyyy-MM-dd}.log</fileNamePattern>-->
        <!-- FIX: rolled files must live alongside the active log; this previously pointed
             at D:/dth/log while the active file is on C:/log (rolling across drives forces
             a copy+delete instead of a rename and scatters the logs). -->
        <fileNamePattern>C:/log/stdout.log.%d{yyyy-MM-dd}.log</fileNamePattern>
        <maxHistory>30</maxHistory> <!-- keep 30 days of history -->
    </rollingPolicy>
</appender>
<logger name="com.zxd" level="DEBUG" />
<root level="DEBUG">
    <appender-ref ref="CONSOLE" />
    <appender-ref ref="FILE" />
</root>
yaml
# Spring Boot logging properties. NOTE: when logging.config points at a
# logback-spring.xml, appender/pattern setup comes from that XML and several
# of these properties are superseded by it.
logging:
  config: classpath:logback-spring.xml  # location of the logback XML config file
  level:
    com.zxd: DEBUG  # package prefix -> log level
  # FIX: logging.file and logging.path are mutually exclusive in Spring Boot —
  # when both are set, logging.path is silently ignored. Keep only the file
  # setting, and give it a full file name (it previously held the bare
  # directory C:/log, which logging.path expects, not logging.file).
  file: C:/log/stdout.log  # full path of the log file
  pattern:
    file: '%d{yyyy/MM/dd} === [%thread] == %-5level == %logger{50} == %msg%n'  # file log line format
连接池
spring:
  datasource:
    name: text
    type: com.alibaba.druid.pool.DruidDataSource
    # Druid connection-pool settings (druid-spring-boot-starter)
    druid:
      # FIX: the property is 'filters' (plural) and the monitoring/statistics
      # filter is named 'stat' — the original 'filter: star' was ignored, so
      # Druid monitoring never activated.
      filters: stat
      driver-class-name: com.mysql.cj.jdbc.Driver
      # quoted: the value contains '&' and ': ' — keep it an unambiguous YAML string
      url: 'jdbc:mysql://192.168.187.135:3306/changgou_user?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC'
      username: root
      password: '123456'  # quoted so YAML keeps it a string, not an integer
      # initial / minimum-idle / maximum pool size
      initial-size: 1
      min-idle: 1
      max-active: 20
      # max wait (ms) when acquiring a connection before timing out
      max-wait: 60000
      # interval (ms) between eviction runs that detect idle connections to close
      time-between-eviction-runs-millis: 60000
      # minimum time (ms) a connection stays in the pool before it may be evicted
      min-evictable-idle-time-millis: 300000
      validation-query: SELECT 'x'
      test-while-idle: true
      test-on-borrow: false
      test-on-return: false
      # PSCache: set true for Oracle, false for MySQL; false is also recommended
      # when sharding across many databases/tables
      pool-prepared-statements: false
      # max cached prepared statements per connection
      max-pool-prepared-statement-per-connection-size: 20
thymeleaf
spring:
  thymeleaf:
    cache: false  # disable template caching (development setting)
    # FIX: the key was misspelled 'enconding' — Spring silently ignores unknown
    # properties, so the intended encoding was never applied.
    encoding: UTF-8  # character encoding used for the HTML templates