目前项目中将日志记录在 ES 中,于是调研了几种比较简单的操作 ES 的方法,最终选择了 bboss,用起来非常简单。
package com.td.tdgistaskservice.es.crud;
import com.td.tdgistaskservice.es.entity.LoggerDemo;

import org.frameworkset.elasticsearch.ElasticSearchException;
import org.frameworkset.elasticsearch.boot.BBossESStarter;
import org.frameworkset.elasticsearch.client.ClientInterface;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.text.ParseException;
import java.util.logging.Level;
import java.util.logging.Logger;
@Service
public class LoggerCrud {

    /** JUL logger; replaces the original {@code e.printStackTrace()} calls. */
    private static final Logger LOG = Logger.getLogger(LoggerCrud.class.getName());

    @Autowired
    private BBossESStarter bbossESStarter;

    /** Elasticsearch index name, injected from property {@code es.indicename}. */
    @Value("${es.indicename}")
    private String indicename;

    /** Path of the DSL mapper file (e.g. esmapper/logger.xml), from {@code es.esmapper}. */
    @Value("${es.esmapper}")
    private String esmapper;

    /** Name of the index-creation DSL defined inside the mapper file, from {@code es.createindicename}. */
    @Value("${es.createindicename}")
    private String createindicename;

    /**
     * Creates the logger index if it does not already exist.
     *
     * <p>If the index is already present this is a no-op. Failures are logged
     * (with the full stack trace preserved) instead of being silently printed,
     * and are not rethrown so startup callers are unaffected.
     */
    public void createIndice() {
        // Client backed by the DSL configuration file; single instance, thread-safe.
        ClientInterface clientUtil = bbossESStarter.getConfigRestClient(esmapper);
        try {
            // Nothing to do when the index already exists.
            if (clientUtil.existIndice(indicename)) {
                return;
            }
            // Create the index using the mapping DSL named by createindicename
            // (defined in the mapper xml referenced by esmapper).
            clientUtil.createIndiceMapping(indicename, createindicename);
            LOG.info("Created Elasticsearch index: " + indicename);
        } catch (ElasticSearchException e) {
            // Preserve the cause instead of the original printStackTrace().
            LOG.log(Level.SEVERE, "Failed to create Elasticsearch index " + indicename, e);
        }
    }

    /**
     * Adds the given log entry to Elasticsearch, or updates it when a document
     * with the same {@code @ESId} (loggerid) already exists.
     *
     * @param demo the log document to index; its loggerid field is used as the _id
     * @throws ParseException declared for backward compatibility with existing callers
     */
    public void addAndUpdateDocument(LoggerDemo demo) throws ParseException {
        // Plain REST client for document CRUD; single instance, thread-safe.
        ClientInterface clientUtil = bbossESStarter.getRestClient();
        // NOTE(review): the second argument is the ES document *type*, but the
        // mapper xml defines the type as "td_gis_logger" — confirm that the
        // es.indicename property equals "td_gis_logger", otherwise documents
        // are indexed under a type that has no mapping.
        clientUtil.addDocument(indicename, indicename, demo);
    }
}
package com.td.tdgistaskservice.es.entity;
import com.frameworkset.orm.annotation.ESId;
import lombok.Data;
import org.frameworkset.elasticsearch.entity.ESBaseData;
/**
 * Elasticsearch document for a single application log entry.
 *
 * <p>Lombok {@code @Data} generates getters/setters/equals/hashCode/toString.
 */
@Data
public class LoggerDemo extends ESBaseData {
    // Document identifier field: @ESId maps loggerid to the Elasticsearch _id,
    // so re-indexing the same loggerid updates the existing document.
    @ESId
    private Long loggerid;
    // Log message body.
    private String loggercontent;
    /** When the mapping defines a date format for this field, the following two
     * annotations are required as well, e.g.
     *
     "agentStarttime": {
     "type": "date",###specify multiple date formats
     "format":"yyyy-MM-dd HH:mm:ss.SSS||yyyy-MM-dd'T'HH:mm:ss.SSS||yyyy-MM-dd HH:mm:ss||epoch_millis"
     }
     @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss.SSS")
     @Column(dataformat = "yyyy-MM-dd HH:mm:ss.SSS")
     */
    // NOTE(review): kept as String here — the index mapping types it as plain
    // "text", not "date"; confirm which is intended.
    private String agentStarttime;
    // Log level (e.g. INFO/WARN/ERROR) — presumably; verify against callers.
    private String loggerlevel;
    // Originating module of the log entry — presumably; verify against callers.
    private String loggermodel;
}
<properties>
    <!--
        Index-creation DSL for the logger index; referenced by the
        es.createindicename property and loaded via the bboss mapper file.
        NOTE(review): "agentStarttime" is mapped as plain "text" here, while the
        entity comments describe a date format - confirm which is intended.
    -->
    <property name="createloggerIndice">
        <![CDATA[{
            "settings": {
                "number_of_shards": 6,
                "index.refresh_interval": "5s"
            },
            "mappings": {
                "td_gis_logger": {
                    "properties": {
                        "loggerid":{
                            "type":"long"
                        },
                        "loggercontent": {
                            "type": "text"
                        },
                        "agentStarttime": {
                            "type": "text"
                        },
                        "loggerlevel": {
                            "type": "text"
                        },
                        "loggermodel": {
                            "type": "text"
                        }
                    }
                }
            }
        }]]>
    </property>
</properties>
配置文件(application.yml):
server:
  port: 9966

spring:
  http:
    multipart:
      # Upload size limits for multipart requests.
      max-file-size: 100Mb
      max-request-size: 100Mb
  profiles:
    active: develop
  elasticsearch:
    # bboss Elasticsearch client configuration.
    bboss:
      elasticUser: elastic
      elasticPassword: changeme
      elasticsearch:
        rest:
          hostNames: 10.1.3.49:9200
          ##hostNames: 192.168.8.25:9200,192.168.8.26:9200,192.168.8.27:9200 ## cluster address list
        dateFormat: yyyy.MM.dd
        timeZone: Asia/Shanghai
        ttl: 2d
        # Log the rendered DSL templates (useful in development).
        showTemplate: true
        discoverHost: false
      dslfile:
        # -1 disables hot reloading of the DSL mapper files.
        refreshInterval: -1
      http:
        # Connection/socket timeouts in milliseconds.
        timeoutConnection: 5000
        timeoutSocket: 5000
        connectionRequestTimeout: 5000
        retryTime: 1
        maxLineLength: -1
        maxHeaderCount: 200
        # Connection pool sizing.
        maxTotal: 400
        defaultMaxPerRoute: 200
        soReuseAddress: false
        soKeepAlive: false
        timeToLive: 3600000
        keepAlive: 3600000
        # TLS settings — left empty for plain HTTP.
        keystore:
        keyPassword:
        hostnameVerifier: