第一次连接vpn的配置
https://me.jinchuang.org/archives/381.html
https://axiu.me/documentation/win-10-vpn-no-response/
https://blog.csdn.net/sinat_32862717/article/details/81459054
前端页面数字相加出现精度错误
//使用toFixed(2) 使数字保留两位精度
<span>{{ num.toFixed(2)}}</span>
//过滤器
<span>{{ num | numberFilter}}</span>
filters:{
numberFilter(val){
return val === '' ||
val === null ||
val === undefined ?
0 : val.toFixed(2)
}
}
element-ui 上的值为空显示0
<el-table-column
prop="tbProjecoverallIndicators.totalBuildingArea"
header-align="center"
align="center"
width="160"
label="总规划方案-总建筑面积(㎡)"
:formatter="totalFormatFun">
methods: {
totalFormatFun(row, column, cellValue, index) {
if (cellValue === undefined || cellValue == null) {
return 0
} else {
return cellValue
}
//return (cellValue == undefined || cellValue == null) ? 0 : cellValue
}
将本地jar包安装到本地maven仓库
mvn install:install-file -Dfile=D:/浏览器下载/spring-cloud-netflix-core-2.0.0.M2.jar -DgroupId=org.springframework.cloud -DartifactId=spring-cloud-netflix-core -Dversion=2.0.0.M2 -Dpackaging=jar
mvn install:install-file -Dfile=yundt-cube-utils-2.1.6-RC.jar -DgroupId=com.dtyunxi.cube -DartifactId=yundt-cube-utils -Dversion=2.1.6-RC -Dpackaging=jar
热部署插件 jrebel
Service Address :https://jrebel.qekang.com/{GUID}
GUID地址:
https://www.guidgen.com
http://www.ofmonkey.com/transfer/guid
https://www.guidgenerator.com/online-guid-generator.aspx
https://blog.csdn.net/qierkang/article/details/95095954
mapper文件热部署插件
Jrebel mybatisPlus extension
Echarts设置最小间隔
yAxis: [
{
type: 'value',
name: '数量',
minInterval: 1, //设置最小间隔
axisLabel: {
formatter: '{value}'
}
},
关于String contains
contains(null)会空指针
关于mysql 连表查询的错误
mysql在进行连表查询时,如果连表字段的类型不一致,可能会导致出现数据异常
可以使用cast(t.id as char) 或者 convert(t.id, char)
cast(tp.store_id as UNSIGNED INTEGER) = cast( tsmm.id as UNSIGNED INTEGER)
X拓展 @RequestBody
@GetMapping 不能使用@RequestBody
,因为没有HTTPEntity 只能用@RequestParam
post随意
mybatis plus 插入的时候获取自增主键id
<insert id="insertSysUser" parameterType="com.tianren.tunny.admin.api.entity.SysUser"
keyProperty="userId" useGeneratedKeys="true" keyColumn="user_id">
insert into sys_user
<trim prefix="(" suffix=")" suffixOverrides=",">
<if test="userId != null">user_id,</if>
</trim>
<trim prefix="values (" suffix=")" suffixOverrides=",">
<if test="userId != null">#{userId},</if>
</trim>
<selectKey resultType="int" keyProperty="userId" order="AFTER">
SELECT LAST_INSERT_ID()
</selectKey>
</insert>
CentOS 忘记root 密码
https://blog.csdn.net/zzzjjjfff/article/details/79507201?utm_medium=distribute.pc_relevant.none-task-blog-BlogCommendFromMachineLearnPai2-2.channel_param&depth_1-utm_source=distribute.pc_relevant.none-task-blog-BlogCommendFromMachineLearnPai2-2.channel_param
导出数据库表结构
select column_name as '字段名',
column_comment as '描述',
column_type as '数据类型',
column_key as '键',
is_nullable as '是否为空',
column_default as '默认值'
from information_schema.columns where table_schema='eiiplat_test' and table_name='tb_shipping_point';
多个微服务打开services工具栏
view->tools windows->services
Add services -> as …Type -> Spring boot
IDEA快捷键
序号 | 描述 | 键位 |
---|---|---|
1 | 由controller直接跳转到impl | ctrl + alt + b /左键 |
2 | 查看继承树 | ctrl + alt + u |
3 | 查看方法在哪里被调用了 | ctrl + alt + h |
spring boot 打包
<packaging>jar</packaging>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<configuration>
<mainClass>com.etoak.EmpApp</mainClass>
</configuration>
<version>1.4.2.RELEASE</version>
</plugin>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>build-info</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<target>${java.version}</target>
<source>${java.version}</source>
<encoding>UTF-8</encoding>
</configuration>
</plugin>
<plugin>
<groupId>org.sonarsource.scanner.maven</groupId>
<artifactId>sonar-maven-plugin</artifactId>
<version>3.2</version>
</plugin>
</plugins>
</build>
生成日志文件
- 没有配置logback,将日志输出到指定位置
nohup java -jar XXXX.jar >log.log &
- 使用logback-spring.xml配置
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!--定义日志文件的存储地址 勿在 LogBack 的配置中使用相对路径-->
<property name="LOG_HOME" value="/test/log" />
<!-- 控制台输出 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期,%thread表示线程名,%-5level:级别从左显示5个字符宽度%msg:日志消息,%n是换行符-->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
</encoder>
</appender>
<!-- 按照每天生成日志文件 -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!--日志文件输出的文件名-->
<FileNamePattern>${LOG_HOME}/my.log.%d{yyyy-MM-dd}.log</FileNamePattern>
<!--日志文件保留天数-->
<MaxHistory>30</MaxHistory>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期,%thread表示线程名,%-5level:级别从左显示5个字符宽度%msg:日志消息,%n是换行符-->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
</encoder>
<!--日志文件最大的大小-->
<triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
<MaxFileSize>10MB</MaxFileSize>
</triggeringPolicy>
</appender>
<!-- 日志输出级别 -->
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
</root>
</configuration>
- 使用application.yml配置
logback-spring.xml的优先级高于application.yml
logging:
pattern:
file: "%d{HH:mm:ss.SSS} %contextName [%thread] %-5level %logger{36} - %msg%n"
console: "%d{HH:mm:ss.SSS} %contextName [%thread] %-5level %logger{36} - %msg%n"
path: "./logs/"
file: "./logs/my.log"
file.max-size: 10MB
level:
root: INFO
简化版本
logging:
level:
com.etoak.bean: DEBUG
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<contextName>tunny-basic</contextName>
<!--输出文件路径-->
<property name="log.path" value="../logs/tunny-mall.log"/>
<!--输出到控制台-->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<!--logback默认过滤器,若开启则error以下的日志级别都不会输出-->
<!-- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>ERROR</level>
</filter>-->
<encoder>
<pattern>%d{HH:mm:ss.SSS} %contextName [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!--输出到文件-->
<appender name="file" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}</file>
<!--日志滚动策略,一天切割一次-->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>../logs/mall/mall-logback.%d{yyyy-MM-dd}.log</fileNamePattern>
<!--只保留最近15天的日志-->
<maxHistory>15</maxHistory>
<!--用来指定日志文件的上限大小,那么到了这个值,就会删除旧的日志-->
<totalSizeCap>1GB</totalSizeCap>
</rollingPolicy>
<!-- 日志输出编码格式-->
<encoder>
<charset>UTF-8</charset>
<pattern>%d{HH:mm:ss.SSS} %contextName [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<root level="info">
<appender-ref ref="console"/>
<appender-ref ref="file"/>
</root>
<!-- 测试环境+开发环境. 多个使用逗号隔开. -->
<springProfile name="test,dev">
<logger name="com.tianren" level="debug" additivity="true" >
<!-- <appender-ref ref="console"/>-->
</logger>
</springProfile>
<!-- 生产环境可设置成info 或error -->
<springProfile name="prod">
<root level="info">
<appender-ref ref="console"/>
<appender-ref ref="file"/>
</root>
<!-- <logger name="com.tianren" level="ERROR" additivity="false" >
<appender-ref ref="file"/>
</logger>-->
</springProfile>
</configuration>
spring boot获取项目下的文件
- 此种方式本地IDEA测试能够跑通,但是打成jar包就不能运行了
File file = ResourceUtils.getFile("classpath:static/运营统计分析-供应商量统计分析-导出模板.xlsx");
解决办法
- 配置静态资源访问路径,直接用绝对路径访问
@Configuration
public class WebMvcConfiger implements WebMvcConfigurer {
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
registry.addResourceHandler("/video/**")
.addResourceLocations("file:/root/webapps/video/");
}
}
@GetMapping("/download")
@ResponseBody
public void downloadVideo(HttpServletResponse response) throws Exception{
File file = new File("/root/webapps/video/yanyuyu.mp4");
FileInputStream fis = new FileInputStream(file);
ServletOutputStream sos = response.getOutputStream();
response.setHeader("Content-Disposition","attachment;filename=yanyuyu.mp4");
byte[] b = new byte[1024 * 8];
int len = 0;
while((len = fis.read(b)) != -1){
sos.write(b, 0, len);
}
}
<div id="container">
<a th:href="@{/download}" style="margin: 18px auto">下载原视频</a>
<br>
<video id="video" controls autoplay="autoplay">
<source th:src="@{/video/xxx.mp4}" type="video/mp4">
</video>
</div>
- 使用ClassPathResource
ClassPathResource resource = new ClassPathResource("static/test.xlsx");
InputStream inputStream = resource.getInputStream();
File somethingFile = File.createTempFile("test", ".xlsx");
FileUtils.copyInputStreamToFile(inputStream, somethingFile);
分布式自增id 雪花算法
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.10</version>
</dependency>
package com.javacode.util;
import java.net.Inet4Address;
import java.net.UnknownHostException;
import java.util.concurrent.ThreadLocalRandom;

import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.SystemUtils;
/**
* 描述: Twitter的分布式自增ID雪花算法snowflake (Java版)
**/
public class SnowFlake {
/**
* 起始的时间戳
*/
private final static long START_STMP = 1480166465631L;
/**
* 每一部分占用的位数
*/
private final static long SEQUENCE_BIT = 12; //序列号占用的位数
private final static long MACHINE_BIT = 5; //机器标识占用的位数
private final static long DATACENTER_BIT = 5;//数据中心占用的位数
/**
* 每一部分的最大值
*/
private final static long MAX_DATACENTER_NUM = -1L ^ (-1L << DATACENTER_BIT);
private final static long MAX_MACHINE_NUM = -1L ^ (-1L << MACHINE_BIT);
private final static long MAX_SEQUENCE = -1L ^ (-1L << SEQUENCE_BIT);
/**
* 每一部分向左的位移
*/
private final static long MACHINE_LEFT = SEQUENCE_BIT;
private final static long DATACENTER_LEFT = SEQUENCE_BIT + MACHINE_BIT;
private final static long TIMESTMP_LEFT = DATACENTER_LEFT + DATACENTER_BIT;
private long datacenterId; //数据中心
private long machineId; //机器标识
private long sequence = 0L; //序列号
private long lastStmp = -1L;//上一次时间戳
public SnowFlake(long datacenterId, long machineId) {
datacenterId = getWorkId();
machineId = 3;
if (datacenterId > MAX_DATACENTER_NUM || datacenterId < 0) {
throw new IllegalArgumentException("datacenterId can't be greater than MAX_DATACENTER_NUM or less than 0");
}
if (machineId > MAX_MACHINE_NUM || machineId < 0) {
throw new IllegalArgumentException("machineId can't be greater than MAX_MACHINE_NUM or less than 0");
}
this.datacenterId = datacenterId;
this.machineId = machineId;
}
/**
* 产生下一个ID
*
* @return
*/
public synchronized String nextId() {
long currStmp = getNewstmp();
if (currStmp < lastStmp) {
throw new RuntimeException("Clock moved backwards. Refusing to generate id");
}
if (currStmp == lastStmp) {
//相同毫秒内,序列号自增
sequence = (sequence + 1) & MAX_SEQUENCE;
//同一毫秒的序列数已经达到最大
if (sequence == 0L) {
currStmp = getNextMill();
}
} else {
//不同毫秒内,序列号置为0
sequence = 0L;
}
lastStmp = currStmp;
return String.valueOf((currStmp - START_STMP) << TIMESTMP_LEFT //时间戳部分
| datacenterId << DATACENTER_LEFT //数据中心部分
| machineId << MACHINE_LEFT //机器标识部分
| sequence); //序列号部分
}
private long getNextMill() {
long mill = getNewstmp();
while (mill <= lastStmp) {
mill = getNewstmp();
}
return mill;
}
private long getNewstmp() {
return System.currentTimeMillis();
}
public synchronized String getNum(String num){
Integer integer = Integer.valueOf(num)+1;
if (integer < 10){
System.out.println("0000"+integer);
return "0000"+integer;
}else if (integer < 100){
System.out.println("000"+integer);
return "000"+integer;
}else if (integer < 1000){
System.out.println("00"+integer);
return "00"+integer;
}else if (integer < 10000){
System.out.println("0"+integer);
return "0"+integer;
}else {
return ""+integer;
}
}
private static Long getWorkId(){
try {
String hostAddress = Inet4Address.getLocalHost().getHostAddress();
int[] ints = toCodePoints(hostAddress);
int sums = 0;
for(int b : ints){
sums += b;
}
return (long)(sums % 32);
} catch (UnknownHostException e) {
// 如果获取失败,则使用随机数备用
return RandomUtils.nextLong(0,31);
}
}
private static Long getDataCenterId(){
int[] ints = toCodePoints(SystemUtils.getHostName());
int sums = 0;
for (int i: ints) {
sums += i;
}
return (long)(sums % 32);
}
public static void main(String[] args) {
SnowFlake snowFlake = new SnowFlake(getWorkId(),getDataCenterId());
long start = System.currentTimeMillis();
for (int i = 0; i < 10; i++) {
String newLong = snowFlake.nextId();
System.out.println(newLong +"/长度:" + String.valueOf(newLong).length());
}
System.out.println(System.currentTimeMillis() - start);
}
public static int[] toCodePoints(CharSequence str) {
if (str == null) {
return null;
} else if (str.length() == 0) {
return ArrayUtils.EMPTY_INT_ARRAY;
} else {
String s = str.toString();
int[] result = new int[s.codePointCount(0, s.length())];
int index = 0;
for(int i = 0; i < result.length; ++i) {
result[i] = s.codePointAt(index);
index += Character.charCount(result[i]);
}
return result;
}
}
}
通用返回类型
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import java.io.Serializable;
/**
* 响应信息主体
*/
@ToString
public class R<T> implements Serializable {
    // Status codes: 0 means success, 1 means failure.
    private static final int SUCCESS = 0;
    private static final int FAIL = 1;
    private static final long serialVersionUID = 1L;

    // Human-readable status message; defaults to "success".
    @Getter
    @Setter
    private String msg = "success";

    // Extra untyped attachment carried alongside the typed payload.
    @Getter
    @Setter
    private Object object;

    // Status code; defaults to SUCCESS.
    @Getter
    @Setter
    private int code = SUCCESS;

    // Typed response payload.
    @Getter
    @Setter
    private T data;

    /** Empty success response. */
    public R() {
        super();
    }

    /** Success response carrying a payload. */
    public R(T data) {
        this();
        this.data = data;
    }

    /** Success response carrying a payload and a custom message. */
    public R(T data, String msg) {
        this(data);
        this.msg = msg;
    }

    /** Failure response built from an exception; msg is the exception message. */
    public R(Throwable e) {
        this();
        this.msg = e.getMessage();
        this.code = FAIL;
    }

    /** Fully-specified response. */
    public R(T data, String msg, Object object, int code) {
        this(data, msg);
        this.object = object;
        this.code = code;
    }
}
Swagger3
http://localhost:8080/api/swagger-ui/index.html
MybatisPlus控制台打印日志
#测试环境开启sql日志打印
mybatis-plus.configuration.log-impl=org.apache.ibatis.logging.stdout.StdOutImpl
mybatis-plus:
configuration:
log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
解决LocalDateTime变成数组的问题
@DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
@JsonFormat(timezone = "Asia/Shanghai",pattern = "yyyy-MM-dd HH:mm:ss")
private LocalDateTime createTime;
Mysql时间函数
获取当前时间前三个小时
select date_sub(NOW(),interval 3 hour);
获取当前时间后三个小时
select date_add(NOW(),interval 3 hour);
mysql分组后查询每组的前几个
SELECT
tp.*,
tpo.url AS url,
pc.NAME AS cc_name,
tb.url AS brand_url
FROM
(
SELECT
SUBSTRING_INDEX( GROUP_CONCAT( tp.id ORDER BY tp.create_time DESC ), ',', 6 ) tp_ids,
pc.name
FROM
tr_product tp
LEFT JOIN product_category pc ON cast( tp.product_category_id AS UNSIGNED INTEGER ) = cast( pc.id AS UNSIGNED INTEGER )
<where>
tp.is_marketable = '1'
AND tp.del_flag = '1'
<if test="list != null">
AND pc.NAME IN
<foreach collection="list" item="ccName" open="(" separator="," close=")">
#{ccName}
</foreach>
</if>
</where>
GROUP BY
pc.name
) t
LEFT JOIN tr_product tp
ON INSTR( t.tp_ids, tp.id ) > 0
LEFT JOIN product_category pc ON cast( tp.product_category_id AS UNSIGNED INTEGER ) = cast( pc.id AS UNSIGNED INTEGER )
left join (select min(create_time),url,product_id FROM tb_product_oss where dict_value = 'productImg' GROUP BY product_id) tpo
ON tp.id = tpo.product_id
LEFT JOIN tr_brand tb ON tp.brand_id = tb.id
group by tp.product_category_id, tp.store_id
ORDER BY
tp.id ASC
复杂JSON 用 谷歌的Gson
{
"data":[{
"out":{
"result":"测试"
}
}]
}
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.3</version>
</dependency>
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
JsonObject jsonObject = (JsonObject) new JsonParser().parse(json).getAsJsonObject();
JsonObject object = jsonObject.get("data").getAsJsonArray().get(0)
.getAsJsonObject().get("out")
.getAsJsonObject().get("result").getAsJsonObject();
多线程处理数据最后合并
package com.haier.rrswl.les.test;
import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
* @author zhaojinhui
* @date 2021/5/18 15:15
* @apiNote
*/
public class ElevenTest {
public static void main(String[] args) {
try {
List<String> executorService = getExecutorService();
System.out.println(executorService.size());
} catch (InterruptedException e) {
e.printStackTrace();
}
}
public static List<String> getExecutorService() throws InterruptedException{
System.out.println("开始执行多线程...");
long startTime = System.currentTimeMillis();
List<String> list = new CopyOnWriteArrayList<>();//存放返回结果
List<Integer> beforeList = new ArrayList<>(35000);
for (int i = 0; i < 35000; i++) {
beforeList.add(i);
}
List<List<Integer>> partition = Lists.partition(beforeList, 2000);
CountDownLatch countDownLatch = new CountDownLatch(partition.size());
ExecutorService executorService = Executors.newFixedThreadPool(10);
for (int i = 0; i < partition.size(); i++) {
int finalI = i;
Runnable runnable = new Runnable(){
@Override
public void run() {
List<Integer> list1 = partition.get(finalI);
for (Integer integer : list1) {
list.add(UUID.randomUUID().toString());
}
System.out.println("当前线程name : "+Thread.currentThread().getName());
countDownLatch.countDown();
}
};
executorService.execute(runnable);
}
countDownLatch.await();
System.out.println("submit总共cost 时间:" + (System.currentTimeMillis()-startTime)/1000 + "秒");
executorService.shutdown();
return list;
}
}
查询本机出口Ip
curl -s http://httpbin.org/ip
curl icanhazip.com
curl http://ip.3322.net