首先放上maven的pom.xml配置内容:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.wonggogo</groupId>
    <artifactId>project-test</artifactId>
    <version>1.0-SNAPSHOT</version>
    <!-- Spring Boot dependency management -->
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>1.5.19.RELEASE</version>
    </parent>
    <!-- Dependencies -->
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-jpa</artifactId>
        </dependency>
        <dependency>
            <groupId>org.mybatis.spring.boot</groupId>
            <artifactId>mybatis-spring-boot-starter</artifactId>
            <version>1.3.0</version>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <scope>runtime</scope>
        </dependency>
        <!-- fastjson: versions <= 1.2.47 are affected by well-known autoType
             deserialization RCE vulnerabilities; 1.2.83 is the final 1.x
             security release and is API-compatible for JSON.parseObject usage. -->
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.83</version>
        </dependency>
        <dependency>
            <groupId>commons-codec</groupId>
            <artifactId>commons-codec</artifactId>
            <version>1.11</version>
        </dependency>
        <!-- WebSocket for continuous push to clients -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-websocket</artifactId>
        </dependency>
        <!-- Redis (spring-core + spring-data-redis + jedis client) -->
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-core</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-redis</artifactId>
        </dependency>
        <dependency>
            <groupId>redis.clients</groupId>
            <artifactId>jedis</artifactId>
            <version>2.9.1</version>
        </dependency>
        <!-- end Redis -->
    </dependencies>
    <!-- Build an executable jar -->
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
            <plugin>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
第二:放上application.properties的内容:
# Server port
server.port=8883
######################## MySQL configuration ########################
spring.datasource.driver-class-name=com.mysql.jdbc.Driver
spring.datasource.url=jdbc:mysql://localhost:3306/myTestDb?useSSL=false&useUnicode=true&characterEncoding=utf-8
spring.datasource.username=root
spring.datasource.password=123456
########### MyBatis configuration ###########
# Log the SQL statements executed by MyBatis mappers
logging.level.com.wonggogo.model.mapper=DEBUG
mybatis.mapper-locations=classpath*:mapper/*.xml
mybatis.type-aliases-package=com.wonggogo.model.domain
######################## Redis configuration ########################
# NOTE(review): these are custom keys read via @Value in RedisConfig, not
# the standard spring.redis.* keys, so Boot auto-configuration ignores them.
redis.host=localhost
redis.port=6379
redis.password=123456789
# NOTE(review): redis.timeout is not read by RedisConfig (it hard-codes a
# 5000 ms client timeout) — confirm whether any other code uses this key.
redis.timeout=10
redis.jedis.pool.max-idle=8
redis.jedis.pool.max-wait=-1
第三:创建一个自启动类,服务启动后便开始运行,实时解析日志,保存需要的信息到数据库中;使用jar包形式启动
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.wonggogo.analysisLog.redis.service.RedisManager;
import com.wonggogo.model.domain.PlatingFlow;
import com.wonggogo.model.mapper.PlatingFlowMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.io.File;
import java.io.RandomAccessFile;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
 * Tails an application log file and persists matching entries to the database.
 *
 * Deployment: a Linux cron job launches this jar at 00:01 every day (the shell
 * script first kills yesterday's process, then restarts — see the cron section).
 * The current read position (file pointer) is cached in Redis so that after a
 * crash or restart the parser resumes from the last persisted offset instead of
 * re-reading the whole file.
 */
@Component
public class LogViewRunner implements ApplicationRunner {

    /** Fixed marker that precedes the JSON payload in the lines we parse. */
    private static final String LOG_MARKER = "logid=aatt,logmsg=";

    private final Logger logger = LoggerFactory.getLogger(LogViewRunner.class);

    @Resource
    private RedisManager redisManager;

    /** Mapper used to persist the parsed log rows. */
    @Resource
    private PlatingFlowMapper platingFlowMapper;

    /** Pool size should equal the number of log files being tailed. */
    private final ScheduledExecutorService exec = Executors.newScheduledThreadPool(1);

    @Override
    public void run(ApplicationArguments args) {
        String dateStr = new SimpleDateFormat("yyyy-MM-dd").format(new Date());
        execute("/data/logs/", "aa_test", dateStr);
    }

    /**
     * Starts a scheduled task that repeatedly parses content appended to the log.
     *
     * @param path    directory containing the log files
     * @param proName project name, e.g. "aa_test"
     * @param dateStr date string in yyyy-MM-dd format
     */
    private void execute(String path, String proName, String dateStr) {
        final long[] pointer = {0L};
        // NOTE(review): no separator between proName and dateStr — kept as-is
        // for compatibility with keys already stored in Redis.
        String key = "pointer." + proName + dateStr;
        if (redisManager.exists(key)) {
            // The cached value is the boxed Long written by saveLogToTable.
            pointer[0] = ((Number) redisManager.get(key)).longValue();
            logger.info(key + "初始化指针位置:" + pointer[0]);
        }
        File logFile = new File(path + proName + "." + dateStr + ".0.log");
        // Poll for newly appended log lines once per second.
        exec.scheduleWithFixedDelay(() -> {
            try {
                // Parse any new content and advance the pointer.
                pointer[0] = analysisLogFile(logFile, pointer[0], key);
            } catch (Exception e) {
                // Do NOT rethrow: an exception escaping a scheduled task silently
                // cancels all subsequent executions of scheduleWithFixedDelay.
                // Log and retry on the next tick, resuming from the last pointer
                // persisted in Redis.
                logger.error(proName + "文件解析出现异常, 此时pointer: " + pointer[0], e);
            }
        }, 0, 1, TimeUnit.SECONDS);
    }

    /**
     * Parses the log file starting at {@code pointer} and returns the new pointer.
     *
     * @param logFile the log file to read
     * @param pointer byte offset to resume reading from
     * @param key     Redis key under which the pointer is persisted
     * @return the file offset after the last fully read line
     * @throws Exception if the stored pointer lies beyond the end of the file
     *                   (e.g. the file was truncated or rotated)
     */
    private long analysisLogFile(File logFile, long pointer, String key) throws Exception {
        long len = logFile.length();
        if (len < pointer) {
            logger.error("文件指针大于文件总长度,抛出异常!");
            throw new IllegalStateException("pointer " + pointer + " is beyond file length " + len);
        }
        // Read-only access is sufficient; try-with-resources guarantees the
        // handle is closed even when a line fails to parse (the original leaked
        // the RandomAccessFile on exception).
        try (RandomAccessFile randomFile = new RandomAccessFile(logFile, "r")) {
            randomFile.seek(pointer); // jump to the last processed position
            String tmp;
            while ((tmp = randomFile.readLine()) != null) {
                // Offset immediately after the line just read.
                pointer = randomFile.getFilePointer();
                saveLogToTable(tmp, pointer, key);
            }
        }
        return pointer;
    }

    /**
     * Persists one parsed log line to the PlatingFlow table and records the
     * current file pointer in Redis.
     *
     * @param tmp     one raw log line (as decoded by RandomAccessFile.readLine)
     * @param pointer file offset just after this line
     * @param key     Redis key under which the pointer is persisted
     */
    private void saveLogToTable(String tmp, long pointer, String key) {
        long t1 = System.currentTimeMillis();
        // RandomAccessFile.readLine decodes bytes as ISO-8859-1; re-decode the
        // raw bytes as UTF-8 to recover non-ASCII characters.
        tmp = new String(tmp.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8);
        // Only lines carrying the fixed marker are parsed.
        // NOTE(review): `index > 0` skips a marker at position 0 — kept as-is;
        // confirm whether the marker can ever start a line.
        int index = tmp.indexOf(LOG_MARKER);
        if (index > 0) {
            JSONObject jobj = JSON.parseObject(tmp.substring(index + LOG_MARKER.length()));
            PlatingFlow pl = new PlatingFlow();
            pl.setType(jobj.getInteger("type"));
            // ...parse the remaining fields and set them on pl...
            platingFlowMapper.save(pl);
            logger.info("==解析的日志:" + jobj + ",所耗时间(ms):" + (System.currentTimeMillis() - t1));
        }
        // Persist the pointer so a restart resumes from this position.
        redisManager.set(key, pointer);
    }
}
所引用的redis配置类:
import com.wonggogo.analysisLog.redis.utils.SerializeUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.jedis.JedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import redis.clients.jedis.JedisPoolConfig;
/**
 * Redis configuration: Jedis connection pool, connection factory, and a
 * RedisTemplate whose keys are plain strings and whose values are JDK-serialized
 * via {@code SerializeUtils}.
 */
@Configuration
public class RedisConfig {

    @Value("${redis.host}")
    private String host;

    @Value("${redis.port}")
    private Integer port;

    @Value("${redis.password}")
    private String password;

    @Value("${redis.jedis.pool.max-idle}")
    private int maxIdle;

    @Value("${redis.jedis.pool.max-wait}")
    private long maxWaitMillis;

    /**
     * Jedis connection pool settings.
     *
     * @return the configured pool
     */
    @Bean
    public JedisPoolConfig jedisPoolConfig() {
        JedisPoolConfig jedisPoolConfig = new JedisPoolConfig();
        // Maximum number of idle connections kept in the pool.
        jedisPoolConfig.setMaxIdle(maxIdle);
        // Maximum total number of connections in the pool.
        jedisPoolConfig.setMaxTotal(1000);
        // Maximum time to wait for a connection when the pool is exhausted.
        jedisPoolConfig.setMaxWaitMillis(maxWaitMillis);
        // Minimum idle time before a connection may be evicted (default 30 min).
        jedisPoolConfig.setMinEvictableIdleTimeMillis(300000);
        // Connections examined per eviction run (negative n means 1/abs(n); default 3).
        jedisPoolConfig.setNumTestsPerEvictionRun(10);
        // Interval between eviction runs in ms (negative disables the evictor; default -1).
        jedisPoolConfig.setTimeBetweenEvictionRunsMillis(30000);
        // Validate connections on borrow; broken ones are dropped and replaced.
        jedisPoolConfig.setTestOnBorrow(true);
        // Also validate connections while idle (default false).
        jedisPoolConfig.setTestWhileIdle(true);
        return jedisPoolConfig;
    }

    /**
     * Connection factory backed by the Jedis pool.
     *
     * @param jedisPoolConfig the pool settings bean
     * @return the connection factory
     */
    @Bean
    public JedisConnectionFactory jedisConnectionFactory(JedisPoolConfig jedisPoolConfig) {
        JedisConnectionFactory jedisConnectionFactory = new JedisConnectionFactory();
        // Pool configuration.
        jedisConnectionFactory.setPoolConfig(jedisPoolConfig);
        // Host and port of the Redis server.
        jedisConnectionFactory.setHostName(host);
        jedisConnectionFactory.setPort(port);
        // Password, when the Redis server has auth enabled.
        jedisConnectionFactory.setPassword(password);
        // Client timeout in milliseconds.
        jedisConnectionFactory.setTimeout(5000);
        return jedisConnectionFactory;
    }

    /**
     * RedisTemplate with String keys and JDK-serialized values. Parameterized as
     * {@code RedisTemplate<String, Object>} (instead of the raw type) to match
     * the injection point in RedisManager.
     *
     * @param redisConnectionFactory the connection factory
     * @return the configured template
     */
    @Bean(name = "setRedisTemplate")
    public RedisTemplate<String, Object> setRedisTemplate(RedisConnectionFactory redisConnectionFactory) {
        RedisTemplate<String, Object> redisTemplate = new RedisTemplate<>();
        redisTemplate.setKeySerializer(new StringRedisSerializer());
        redisTemplate.setHashKeySerializer(new StringRedisSerializer());
        redisTemplate.setHashValueSerializer(new SerializeUtils());
        redisTemplate.setValueSerializer(new SerializeUtils());
        // Enable only if Redis transactions are actually needed:
        //redisTemplate.setEnableTransactionSupport(true);
        redisTemplate.setConnectionFactory(redisConnectionFactory);
        return redisTemplate;
    }
}
redis工具类:
package com.wonggogo.analysisLog.redis.service;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;
import java.util.concurrent.TimeUnit;
/**
 * Thin convenience wrapper around the application's RedisTemplate
 * (qualified bean "setRedisTemplate").
 */
@Component
public class RedisManager {

    @Qualifier("setRedisTemplate")
    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * @param key the key to probe
     * @return true only if the key definitely exists; {@code hasKey} can return
     *         null (e.g. when executed inside a pipeline/transaction), which is
     *         treated as false instead of throwing a NullPointerException on unboxing
     */
    public boolean exists(String key) {
        return Boolean.TRUE.equals(redisTemplate.hasKey(key));
    }

    /** Stores a value with no expiry. */
    public void set(String key, Object value) {
        redisTemplate.opsForValue().set(key, value);
    }

    /**
     * SETEX: stores a value with an expiry.
     *
     * @param key    the key
     * @param value  the value
     * @param expire time to live in seconds
     */
    public void setEx(String key, Object value, Long expire) {
        redisTemplate.opsForValue().set(key, value, expire, TimeUnit.SECONDS);
    }

    /** @return the stored value, or null if the key does not exist */
    public Object get(String key) {
        return redisTemplate.opsForValue().get(key);
    }

    /** Deletes the key if present. */
    public void del(String key) {
        redisTemplate.delete(key);
    }
}
序列化工具类:
import org.hibernate.type.SerializationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.redis.serializer.RedisSerializer;
import java.io.*;
/**
 * JDK-serialization based RedisSerializer used for Redis values.
 *
 * NOTE(review): the import block brings in org.hibernate.type.SerializationException,
 * which is NOT the type declared by RedisSerializer — spring-data-redis declares
 * org.springframework.data.redis.serializer.SerializationException. Both are
 * unchecked, so the throws clauses are omitted here and the wrong import is no
 * longer relied upon; the hibernate import itself should be deleted.
 */
public class SerializeUtils implements RedisSerializer<Object> {

    private static final Logger logger = LoggerFactory.getLogger(SerializeUtils.class);

    /** @return true if the byte array is null or has zero length */
    public static boolean isEmpty(byte[] data) {
        return (data == null || data.length == 0);
    }

    /**
     * Serializes an object with standard JDK serialization.
     *
     * @param object the value to serialize; must implement Serializable
     * @return the serialized bytes, an empty array for null input, or null when
     *         serialization fails (the failure is logged, not rethrown)
     * @throws IllegalArgumentException if the payload is not Serializable
     */
    @Override
    public byte[] serialize(Object object) {
        if (object == null) {
            return new byte[0];
        }
        // Check BEFORE entering the try block: in the original this exception was
        // thrown inside the try and immediately swallowed by catch (Exception),
        // hiding the programming error from the caller.
        if (!(object instanceof Serializable)) {
            throw new IllegalArgumentException(SerializeUtils.class.getSimpleName() + " requires a Serializable payload " +
                    "but received an object of type [" + object.getClass().getName() + "]");
        }
        byte[] result = null;
        try (
                ByteArrayOutputStream byteStream = new ByteArrayOutputStream(128);
                ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteStream)
        ) {
            objectOutputStream.writeObject(object);
            objectOutputStream.flush();
            result = byteStream.toByteArray();
        } catch (Exception ex) {
            // Best-effort: caller receives null and the Redis write fails visibly.
            logger.error("Failed to serialize", ex);
        }
        return result;
    }

    /**
     * Deserializes bytes produced by {@link #serialize(Object)}.
     *
     * @param bytes the serialized bytes; null or empty yields null
     * @return the deserialized object, or null on failure (logged, not rethrown)
     */
    @Override
    public Object deserialize(byte[] bytes) {
        if (isEmpty(bytes)) {
            return null;
        }
        Object result = null;
        try (
                ByteArrayInputStream byteStream = new ByteArrayInputStream(bytes);
                ObjectInputStream objectInputStream = new ObjectInputStream(byteStream)
        ) {
            result = objectInputStream.readObject();
        } catch (Exception e) {
            logger.error("Failed to deserialize", e);
        }
        return result;
    }
}
第四:如果需要每天解析,则使用linux的定时器,每天00:01分检查昨天进程,如果存在则杀死进程并重启,不存在直接重启:
使用命令crontab -e 进行编辑定时任务,使用crontab -l查看定时任务,定时任务内容:
#每天的00:01分启动日志解析程序
1 0 * * * /opt/mytest/logHandlerStart.sh
shell脚本的内容:
#!/bin/bash
# The shebang must be the very first line of the script to take effect
# (the original had it on line 2). Source the login profile afterwards so
# JAVA_HOME/PATH are available when launched from cron.
. ~/.bash_profile

# Find the PID(s) of yesterday's running instance, if any.
pid=$(ps -ef | grep project-test-1.0-SNAPSHOT | grep -v grep | awk '{print $2}')
if [ "${pid}" = "" ]
then
    echo "no java is alive"
else
    kill -9 ${pid}
fi
# Restart the parser in the background, detached from the terminal.
nohup java -jar /opt/mytest/project-test-1.0-SNAPSHOT.jar >/dev/null 2>&1 &
第五:使用websocket实时推送最新的入库信息:
websocket配置类:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.websocket.*;
import javax.websocket.server.ServerEndpoint;
import java.io.IOException;
import java.util.concurrent.CopyOnWriteArraySet;
/**
 * WebSocket endpoint that continuously pushes newly persisted flow records to
 * connected clients. One handler instance exists per connection.
 */
@ServerEndpoint(value = "/websocket")
@Component
public class WebSocketHandler {

    private static final Logger logger = LoggerFactory.getLogger(WebSocketHandler.class);

    /** Thread-safe set holding one handler instance per connected client. */
    private static final CopyOnWriteArraySet<WebSocketHandler> webSocketSet = new CopyOnWriteArraySet<WebSocketHandler>();

    /** Session of this client; used to send data back. */
    private Session session;

    /** Push thread for this connection, if one has been started. */
    private Thread thread;

    /**
     * Called when a connection is established.
     */
    @OnOpen
    public void onOpen(Session session) {
        this.session = session;
        webSocketSet.add(this); // register this client
        logger.info("有新连接加入!当前连接数为" + webSocketSet.size());
    }

    /**
     * Called when the connection is closed.
     */
    @OnClose
    public void onClose(Session session) {
        // Stop this connection's push loop before deregistering.
        stopPush();
        webSocketSet.remove(this);
        logger.info("有一连接关闭!sessionId=" + session.getId() + ",当前连接数为" + webSocketSet.size());
    }

    /**
     * Called when a message arrives from the client.
     *
     * @param message the client's message (used as the data-source id to push)
     */
    @OnMessage
    public void onMessage(String message, Session session) {
        logger.info("来自客户端的消息:" + message + ", sessionId=" + session.getId());
        // (Re)start the push loop for this connection.
        startPush(message);
    }

    /**
     * Called when an error occurs on the connection.
     */
    @OnError
    public void onError(Session session, Throwable error) {
        // Log through SLF4J with the stack trace instead of info + printStackTrace().
        logger.error("发生错误", error);
    }

    /** Sends one text message to this connection's client. */
    public void sendMessage(String message) throws IOException {
        this.session.getBasicRemote().sendText(message);
    }

    /**
     * Broadcasts a message to every connected client.
     */
    public static void sendInfo(String message) throws IOException {
        for (WebSocketHandler item : webSocketSet) {
            try {
                item.sendMessage(message);
            } catch (IOException e) {
                // One broken client must not abort the broadcast to the others.
                logger.error("群发消息异常:", e);
            }
        }
    }

    private void startPush(String message) {
        // Interrupt any previous push loop first: otherwise every onMessage
        // would leak one more thread pushing to the same session.
        stopPush();
        this.thread = new Thread(new SocketThread(this, message));
        this.thread.start();
    }

    private void stopPush() {
        try {
            if (this.thread != null) {
                this.thread.interrupt();
            }
        } catch (Exception e) {
            logger.error("停止推送异常:", e);
        }
    }
}
推送线程类:每次查询返回最大的id,下次查询从最大id开始查询
import com.alibaba.fastjson.JSONObject;
import com.zhht.flow.visual.common.PSMapperUtil;
import com.zhht.flow.visual.model.vo.RealTimeFlowData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Per-connection push loop: every second it queries rows with id greater than
 * the last delivered max id and sends them to the client. The loop is stopped
 * via Thread.interrupt(); the catch block re-interrupts because the thrown
 * InterruptedException clears the flag.
 */
public class SocketThread implements Runnable {

    private final Logger logger = LoggerFactory.getLogger(SocketThread.class);

    /** Handler owning the client session we push to. */
    private final WebSocketHandler wsh;

    /** Client message; interpreted as the data-source id to query. */
    private final String message;

    public SocketThread(WebSocketHandler wsh, String message) {
        this.wsh = wsh;
        this.message = message;
    }

    @Override
    public void run() {
        try {
            // Start the cursor at the current max id so only NEW rows are pushed.
            long lastMaxId = PSMapperUtil.queryMaxId();
            Map<String, Long> map = new HashMap<>();
            map.put("dataSource", Long.valueOf(message));
            map.put("lastMaxId", lastMaxId);
            List<RealTimeFlowData> pls;
            // Check the interrupt flag explicitly so the loop also terminates
            // when interrupted outside of sleep() (e.g. during the query or send),
            // not only via an InterruptedException from sleep().
            while (!Thread.currentThread().isInterrupted()) {
                pls = PSMapperUtil.queryNewFlowByDataSource(map);
                if (pls != null && !pls.isEmpty()) {
                    // Advance the cursor to the largest id just delivered.
                    lastMaxId = pls.get(pls.size() - 1).getId();
                    map.put("lastMaxId", lastMaxId);
                }
                wsh.sendMessage(JSONObject.toJSONString(pls));
                Thread.sleep(1000);
            }
        } catch (InterruptedException e) {
            // Restore the interrupt status that the exception cleared.
            Thread.currentThread().interrupt();
        } catch (IOException e) {
            logger.error("向客户端循环推送异常:", e);
        }
        logger.info("SocketThread stopped.");
    }
}
查询数据库的工具类:
import com.wonggogo.model.mapper.PlatingFlowMapper;
import com.wonggogo.model.vo.RealTimeFlowData;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import java.util.List;
import java.util.Map;
/**
 * Static bridge that lets plain (non-Spring-managed) classes such as
 * SocketThread query MySQL through the Spring-managed mapper bean.
 */
@Component
public class PSMapperUtil {

    @Resource
    private PlatingFlowMapper platingFlowMapper;

    /** Holds the Spring-managed instance once the context has wired it. */
    private static PSMapperUtil instance;

    @PostConstruct
    public void init() {
        // Capture the fully-injected bean for static access.
        instance = this;
    }

    /** Queries flow rows newer than map.lastMaxId for the given map.dataSource. */
    public static List<RealTimeFlowData> queryNewFlowByDataSource(Map<String, Long> map) {
        return instance.platingFlowMapper.queryNewFlowByDataSource(map);
    }

    /** @return the current maximum id in the flow table */
    public static long queryMaxId() {
        return instance.platingFlowMapper.queryMaxId();
    }
}
mapper查询的配置文件我就不写了,是关于数据库查询和统计的两个方法。
此时客户端使用websocket连接到后台之后,后台会持续推送新数据到客户端。