安全终止线程
自然终止: run()方法执行完成或者抛出一个未处理的异常导致线程提前结束;
手动终止:1、suspend()、resume()和stop()暴力中止线程,存在风险;2、通过对中断标志位的设置和判别来终止线程。在线程内部,中断状态通过标识位表示,线程中断可通过对标识位的访问实现,该方式最适合用来取消/停止任务(除了中断标识,还可以定义一个布尔变量来控制是否需要停止任务并终止该线程);
thread = new Thread(new RunnableImpl());
thread.start(); 线程启动;
thread.interrupt(); 设置中断标志位;
......
thread.isInterrupted(); 判断该线程(thread实例)是否被中断,不清除中断状态;
Thread.interrupted(); 判断当前线程是否被中断(并清理中断状态,设置标志位flag = false);
interrupt()方法修改中断标志位,当线程处于阻塞状态:立马退出阻塞,并抛出InterruptedException异常,通过捕获这个异常,来让线程退出;当线程处于运行状态:运行不受影响,仅标记了线程的中断为true。在线程任务中调用isInterrupted()方法查看是否被中断并退出;
注意:许多声明抛出InterruptedException的方法(如:Thread.sleep(long millis)),在抛出InterruptedException之前,Java虚拟机会先将该线程的中断标识位清除,此时调用isInterrupted()方法将会返回false。
疑问:线程中止后,需要手动清理其资源吗?目前未见相关操作
package *.controller;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.ctsi.peakcomp.constant.Constant;
import com.ctsi.util.HttpClientPoolUtil;

import lombok.extern.slf4j.Slf4j;

import org.apache.ibatis.annotations.Param;
import org.apache.logging.log4j.core.tools.picocli.CommandLine;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.HttpAsyncResponseConsumerFactory;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import javax.annotation.PostConstruct;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.stream.Collectors;

import static java.lang.Thread.sleep;
@Slf4j
@RestController
@RequestMapping("/peakCache")
public class PeakController {

    @Value("${peakCache.metricIps}")
    private String metricIps;
    @Value("${peakCache.tsdbUrl}")
    private String tsdbUrl;
    @Value("${peakCache.appKey}")
    private String appKey;
    @Value("${peakCache.appSecret}")
    private String appSecret;

    @Autowired
    private HttpClientPoolUtil httpClientPoolUtil;
    @Autowired
    private StringRedisTemplate redisTemplate;
    @Autowired
    @Qualifier("restHighLevelClient")
    RestHighLevelClient esClient;

    /** Background worker that writes yesterday's peak data to redis; at most one alive at a time. */
    Thread thread = null;

    /**
     * Items ("metric@ip") the worker cached successfully / unsuccessfully.
     * Synchronized wrappers because the worker thread writes while request threads may read.
     */
    List<String> successCache = Collections.synchronizedList(new ArrayList<>());
    List<String> failureCache = Collections.synchronizedList(new ArrayList<>());

    /**
     * Month pattern used to pick the ES index, e.g. "idx_flow_202401".
     * DateTimeFormatter is immutable and thread-safe, unlike the SimpleDateFormat
     * instance that was previously shared across request threads.
     */
    private static final DateTimeFormatter INDEX_MONTH_FMT = DateTimeFormatter.ofPattern("yyyyMM");
    /** Day pattern used in the TSDB query window ("yyyy/MM/dd"). */
    private static final DateTimeFormatter DAY_FMT = DateTimeFormatter.ofPattern("yyyy/MM/dd");

    /** Seeds the shared metric@ip work list from the comma-separated configuration value. */
    @PostConstruct
    private void init() {
        List<String> appendMetricIps = Arrays.stream(metricIps.split(",")).collect(Collectors.toList());
        Constant.metricIpList.addAll(appendMetricIps);
    }

    /**
     * 查询欲缓存的峰值数据
     *
     * @param startTime lower bound of the "comeTime" range (inclusive, ES range query)
     * @param endTime   upper bound of the "comeTime" range (inclusive)
     * @return list of "metric@MgmtIp" items extracted from matching requests
     * @throws IOException if the ES search fails
     */
    @RequestMapping("/cacheMetricsAndMgmtIps/fetch")
    public List<String> fetchMetricsAndMgmtIps(@Param("startTime") String startTime,
                                               @Param("endTime") String endTime) throws IOException {
        return this.queryMetricsAndMgmtIps(startTime, endTime);
    }

    /**
     * Searches this month's flow index for "/dataservice/api/queryData/pon" requests whose
     * body contains the 24h-max-zero downsample, and extracts one "metric@MgmtIp" per hit.
     */
    private List<String> queryMetricsAndMgmtIps(String startTime, String endTime) throws IOException {
        List<String> cacheItems = new ArrayList<>();
        /// 指定索引、类型 — index for the current month
        SearchRequest searchRequest = new SearchRequest("idx_flow_" + LocalDate.now().format(INDEX_MONTH_FMT));
        /// 指定查询条件
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        QueryBuilder queryBuilder = QueryBuilders
                .boolQuery()
                .must(QueryBuilders.matchQuery("url", "/dataservice/api/queryData/pon"))
                .must(QueryBuilders.rangeQuery("comeTime").from(startTime).to(endTime))
                .must(QueryBuilders.wildcardQuery("reqJson.keyword", "*24h-max-zero*"));
        searchSourceBuilder.query(queryBuilder);
        // Only the request body is needed for extraction.
        String[] fields = {"reqJson"};
        searchSourceBuilder.fetchSource(fields, null);
        searchSourceBuilder.from(0);
        searchSourceBuilder.size(10000);
        searchRequest.source(searchSourceBuilder);
        /// 查询数据 — raise the response buffer limit to 200 MB (default is 100 MB)
        RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder();
        HttpAsyncResponseConsumerFactory consumerFactory =
                new HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory(200 * 1024 * 1024); // 200 MB (the old "1g" comment was wrong)
        builder.setHttpAsyncResponseConsumerFactory(consumerFactory);
        SearchResponse searchResponse = this.esClient.search(searchRequest, builder.build());
        /// 结果解析
        for (SearchHit hit : searchResponse.getHits().getHits()) {
            Map<String, Object> sourceAsMap = hit.getSourceAsMap();
            JSONObject reqJson = JSONObject.parseObject((String) sourceAsMap.get("reqJson"));
            JSONArray queries = (JSONArray) reqJson.get("queries");
            String metric = (String) queries.getJSONObject(0).get("metric");
            String ip = (String) queries.getJSONObject(0).getJSONObject("tags").get("MgmtIp");
            cacheItems.add(metric + "@" + ip);
        }
        return cacheItems;
    }

    /**
     * 开始缓存昨天的峰值数据
     *
     * Synchronized so that two concurrent /start calls cannot both observe a dead
     * thread and spawn two workers (the controller is a Spring singleton, so the
     * lock is shared by all requests).
     */
    @RequestMapping("/cacheMetricsAndMgmtIps/start")
    public synchronized Object startCacheMetricsAndMgmtIps() {
        if (thread != null && thread.isAlive()) { /// 执行中...
            return resMap(-1, false, "峰值缓存写入线程正在运行中...", null);
        }
        /// 未初始化或已执行结束:启动新的工作线程
        thread = new Thread(new RunnableImpl());
        thread.start();
        return resMap(0, true, "峰值缓存写入线程已启动", null);
    }

    /**
     * 停止缓存昨天的峰值数据
     *
     * Cooperative cancellation: sets the worker's interrupt flag; the worker polls
     * the flag (and sleeps) and exits its loop. Synchronized to pair with start.
     */
    @RequestMapping("/cacheMetricsAndMgmtIps/stop")
    public synchronized Object stopCacheMetricsAndMgmtIps() {
        if (thread != null && thread.isAlive()) {
            /// 设置缓存写入线程中断标志为true
            thread.interrupt();
            return resMap(0, true, "峰值缓存写入线程已停止", null);
        }
        return resMap(-1, false, "峰值缓存写入线程不在运行", null);
    }

    /** Builds the uniform JSON response envelope. */
    private Map<String, Object> resMap(int code, boolean isSuccess, String message, Object value) {
        Map<String, Object> resMap = new HashMap<>();
        resMap.put("code", code);
        resMap.put("success", isSuccess);
        resMap.put("message", message);
        resMap.put("value", value);
        return resMap;
    }

    /**
     * Worker task: for every "metric@ip" in Constant.metricIpList, queries yesterday's
     * 24h-max-zero peak from the TSDB and caches the response body in redis under the
     * "metric@ip" key. Honors thread interruption for cancellation.
     */
    public class RunnableImpl implements Runnable {
        @Override
        public void run() {
            // Fall back to built-in credentials when none are configured.
            // (The original used the obscure `"".contains(appKey)`, which is just an
            // empty-string test.)
            if (appKey == null || appKey.isEmpty()) {
                appKey = "******";
            }
            if (appSecret == null || appSecret.isEmpty()) {
                appSecret = "******";
            }
            String yesterday = LocalDate.now().minusDays(1).format(DAY_FMT);
            for (int i = 0; i < Constant.metricIpList.size(); i++) {
                /****************************************
                 * 查看线程中断标志位是否为true,是则中止线程执行
                 *****************************************/
                if (Thread.currentThread().isInterrupted()) {
                    log.info("----- 峰值缓存写入线程被终止 -----");
                    break;
                }
                /// 中断标识置位,sleep()会抛出InterruptedException
                try {
                    Thread.sleep(20);
                } catch (InterruptedException e) {
                    // sleep() clears the interrupt flag before throwing; restore it
                    // so any outer code can still observe the interruption.
                    Thread.currentThread().interrupt();
                    log.info("----- 峰值缓存写入线程被终止 -----");
                    break;
                }
                String entry = Constant.metricIpList.get(i);
                String[] parts = entry.split("@");
                if (parts.length < 2) {
                    // Previously a malformed entry (no '@') crashed the loop with
                    // ArrayIndexOutOfBoundsException; skip and record it instead.
                    log.error("峰值缓存项格式非法,跳过: {}", entry);
                    failureCache.add(entry);
                    continue;
                }
                String metric = parts[0].trim();
                String mgmtIp = parts[1].trim();
                String item = metric + "@" + mgmtIp;
                String jsonStr = buildQueryJson(yesterday, metric, mgmtIp);
                long startDate = System.currentTimeMillis();
                ResponseEntity<JSONObject> respEntity;
                try {
                    respEntity = httpClientPoolUtil.doTsdbPost(tsdbUrl, jsonStr, appKey, appSecret);
                } catch (Exception e) {
                    log.error("峰值请求失败:{}", e.getMessage(), e);
                    failureCache.add(item);
                    continue;
                }
                int respCode = respEntity.getStatusCodeValue();
                JSONObject respBody = respEntity.getBody();
                // A null body would otherwise NPE on respBody.getString below.
                if (respCode != 200 || respBody == null || "false".equalsIgnoreCase(respBody.getString("success"))) {
                    log.error("峰值请求失败:{}, {}", jsonStr, respBody);
                    failureCache.add(item);
                    continue;
                }
                log.info("峰值请求成功:{}:{},返还码: {}, 查询耗时:{}, 数据量:{}",
                        i, item, respCode, System.currentTimeMillis() - startDate,
                        respBody.getJSONArray("value").size());
                try {
                    // Overwrite any stale value for this metric@ip.
                    redisTemplate.delete(item);
                    redisTemplate.opsForValue().set(item, respBody.toJSONString());
                } catch (Exception e) {
                    log.error("峰值缓存失败: {}: {}", item, e.getMessage());
                    failureCache.add(item);
                    continue;
                }
                successCache.add(item);
                log.info("峰值缓存成功: {}", item);
            }
            log.info("===== 峰值缓存写入线程结束 =====");
        }

        /** Builds the TSDB query body for one metric/ip over the whole given day (24h-max-zero downsample). */
        private String buildQueryJson(String day, String metric, String mgmtIp) {
            return "{\n" +
                    " \"start\": \"" + day + " 00:00:01\", \n" +
                    " \"end\": \"" + day + " 23:59:59\", \n" +
                    " \"queries\": [{\n" +
                    " \"aggregator\": \"none\", \n" +
                    " \"metric\": \"" + metric + "\", \n" +
                    " \"downsample\": \"24h-max-zero\", \n" +
                    " \"tags\": {\n" +
                    " \"MgmtIp\": \"" + mgmtIp + "\"\n" +
                    " }}]\n" +
                    "}";
        }
    }
}