ES使用汇总
- linux环境安装配置ES单机版和集群
材料:jdk跟Elasticsearch:
版本是:jdk-8u211-linux-i586.tar.gz
elasticsearch-2.4.1.tar.gz
步骤:
1.解压jdk跟Elasticsearch,配置jdk环境变量,安装Elasticsearch
tar zxvf jdk-8u211-linux-i586.tar.gz
tar zxvf elasticsearch-2.4.1.tar.gz
2.配置jdk环境变量:
export JAVA_HOME=/usr/jdk1.8.0_211
export CLASSPATH=.:$JAVA_HOME/jre/lib/rt.jar:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=$JAVA_HOME/bin:$PATH
3.安装es-head插件:
进入bin目录, ./plugin install mobz/elasticsearch-head
如果不行,尝试 ./plugin install mobz/elasticsearch-head -Dhttps.protocols=TLSv1.1,TLSv1.2
- ES不能用root登录的解决方式:
在启动文件bin/elasticsearch中添加:ES_JAVA_OPTS="-Des.insecure.allow.root=true"
- 配置elasticsearch.yml:
普通版本配置:network.host: 0.0.0.0 开放给其他机器访问
4.开放相关的防火墙端口
iptables -I INPUT -p tcp --dport 9200 -j ACCEPT
iptables -I INPUT -p tcp --dport 9300 -j ACCEPT
5.配置ES集群,标准配置。
1台机子多个节点:
节点1:
cluster.name: bropen
node.name: node-1
network.host: 0.0.0.0
transport.tcp.port: 9301
http.port: 9201
discovery.zen.ping.unicast.hosts: ["0.0.0.0:9300","0.0.0.0:9301"]
节点2:
cluster.name: bropen
node.name: node-2
network.host: 0.0.0.0
transport.tcp.port: 9300
http.port: 9200
discovery.zen.ping.unicast.hosts: ["0.0.0.0:9300","0.0.0.0:9301"]
- 多台机子,每台机子一个节点:
节点1:
cluster.name: bropen
node.name: node-1
network.host: 0.0.0.0
transport.tcp.port: 9300
http.port: 9200
discovery.zen.ping.unicast.hosts: ["192.168.10.234:9300","192.168.10.222:9301"]
节点2:
cluster.name: bropen
node.name: node-2
network.host: 0.0.0.0
transport.tcp.port: 9301
http.port: 9201
discovery.zen.ping.unicast.hosts: ["192.168.10.234:9300","192.168.10.222:9301"]
- Java操作ES
单机版本获取连接:
使用经验:因ES本身是异步并发的,所以我们创建client只需创建单例就可以了。
public static synchronized Client getClient() throws UnknownHostException {
    // Lazily build one shared client: ES clients are thread-safe and handle
    // requests asynchronously, so a single instance serves the whole JVM.
    if (client == null) {
        InetSocketTransportAddress esAddress =
                new InetSocketTransportAddress(InetAddress.getByName(host), 9300);
        client = TransportClient.builder().build().addTransportAddress(esAddress);
    }
    return client;
}
集群版连接:
集群版中从文件中读ip跟端口,然后创建连接:
/**
 * Returns the shared cluster client, creating it on first use.
 * Hosts and ports are read from EsInformation.txt next to this class, e.g.
 *   host=192.168.10.234,192.168.10.222
 *   port=9300,9301
 * (host[i] is paired with port[i]; the two lists must be the same length.)
 *
 * @throws UnknownHostException if a configured host cannot be resolved
 */
public static synchronized Client getClient() throws UnknownHostException {
    if (client == null) {
        List<String> ls = FileUtil.toArrayByFileReader1(
                ESUtils.class.getResource("").getPath() + "EsInformation.txt");
        String[] hosts = ls.get(0).split("=")[1].split(",");
        String[] ports = ls.get(1).split("=")[1].split(",");
        Settings settings = Settings.builder()
                .put("cluster.name", "bropen") // must match cluster.name in elasticsearch.yml
                // FIX: key was misspelled "tclient.transport.sniff", so ES
                // silently ignored it and sniffing was never enabled.
                .put("client.transport.sniff", true)
                .build();
        client = TransportClient.builder().settings(settings).build();
        for (int i = 0; i < hosts.length; i++) {
            ((TransportClient) client).addTransportAddress(new InetSocketTransportAddress(
                    InetAddress.getByName(hosts[i]), Integer.valueOf(ports[i])));
        }
    }
    return client;
}
读取文件配置的端口跟Ip:
/** Small helper for reading the EsInformation.txt host/port config file. */
public class FileUtil {
    /**
     * Reads a text file line by line into a list.
     *
     * @param name path of the file to read
     * @return one entry per line, in file order; empty list on I/O error
     */
    public static List<String> toArrayByFileReader1(String name) {
        ArrayList<String> arrayList = new ArrayList<String>();
        // FIX: try-with-resources — the original closed the readers only on
        // the success path, leaking both if readLine() threw mid-file.
        try (BufferedReader bf = new BufferedReader(new FileReader(name))) {
            String str;
            while ((str = bf.readLine()) != null) {
                arrayList.add(str);
            }
        } catch (IOException e) {
            // best-effort: callers treat a missing/bad file as "no config"
            e.printStackTrace();
        }
        return arrayList;
    }
}
目录如下:
获取连接成功后那么就是增删查改了:
经验一:因es会分词,中文情况下在2.4.1版本中无法精确查询,所以使用了
\"index\": \"not_analyzed\"
这个在创建表的时候,能够使es精确查询中文英文,这里的意义是默认不分词。
创建表:
/**
 * (Re)creates the "cmsweb" index and registers the 100 per-table type
 * mappings alarm_attribute_data00config .. alarm_attribute_data99config.
 *
 * @throws UnknownHostException    if the ES client cannot be created
 * @throws JsonProcessingException kept for interface compatibility
 */
public static void createIndexConfig() throws UnknownHostException, JsonProcessingException {
    Client client = getClient();
    // Start from a clean slate: drop the index if it already exists.
    if (client.admin().indices().prepareExists("cmsweb").get().isExists()) {
        client.admin().indices().prepareDelete("cmsweb").get();
    }
    // FIX: create the index ONCE, then add each type via putMapping.
    // The original called prepareCreate("cmsweb") on every loop pass,
    // which throws IndexAlreadyExistsException from the second pass on.
    client.admin().indices().prepareCreate("cmsweb").get();
    for (int i = 0; i < 100; i++) {
        String suffix = String.format("%02d", i); // zero-padded 00..99
        String type = "alarm_attribute_data" + suffix + "config";
        // "index":"not_analyzed" keeps string fields un-tokenized so exact
        // (Chinese or English) term queries and terms aggregations work —
        // the behavior the notes above describe but the old mapping omitted.
        // Also fixed the field-name typo "metircTypes" -> "metricTypes" to
        // match the queries and Vo setters used elsewhere in this file.
        String mappingStr = "{ \"" + type + "\" : { \"properties\": { "
                + "\"id\": { \"type\": \"long\" }, "
                + "\"resId\": {\"type\": \"long\"}, "
                + "\"metricId\": {\"type\": \"string\", \"index\": \"not_analyzed\"}, "
                + "\"currentValue\": {\"type\": \"string\", \"index\": \"not_analyzed\"}, "
                + "\"metricUnit\": {\"type\": \"string\", \"index\": \"not_analyzed\"}, "
                + "\"metricTypes\": {\"type\": \"string\", \"index\": \"not_analyzed\"}, "
                + "\"metricsStatus\": {\"type\": \"string\", \"index\": \"not_analyzed\"}, "
                + "\"createTime\": {\"type\": \"long\"}}}}";
        client.admin().indices().preparePutMapping("cmsweb").setType(type).setSource(mappingStr).get();
    }
    // client.close(); // shared singleton — do not close here
}
插入:
/**
 * Bulk-indexes each table's rows into the "cmsweb" index, one bulk request
 * per table (the map key is the ES type/table name).
 *
 * @param map table name -> rows to index; empty/invalid entries are skipped
 * @throws SQLException         kept for interface compatibility
 * @throws UnknownHostException if the ES client cannot be created
 */
public static void saveTableMetric(Map<String, List<AlarmAttributeDataVo>> map) throws SQLException, UnknownHostException {
    Client client = ESUtils.getClient();
    for (Entry<String, List<AlarmAttributeDataVo>> entry : map.entrySet()) {
        String tableName = entry.getKey();
        List<AlarmAttributeDataVo> values = entry.getValue();
        // FIX: skip empty/invalid entries with `continue` — the original
        // used `return`, silently dropping every remaining table in the map.
        if (values == null || values.isEmpty()) {
            continue;
        }
        if (tableName == null || "".equals(tableName)) {
            continue;
        }
        BulkRequestBuilder bulkRequest = client.prepareBulk();
        try {
            for (AlarmAttributeDataVo value : values) {
                // serialize the row and queue it on the bulk request
                byte[] json = mapper.writeValueAsBytes(value);
                bulkRequest.add(client.prepareIndex("cmsweb", tableName).setSource(json));
            }
            bulkRequest.execute().actionGet();
            System.out.println("批量插入完毕");
        } catch (Exception e) {
            // best-effort: a failed table must not block the others
            e.printStackTrace();
        }
        // client.close(); // shared singleton — do not close here
    }
}
查询:
- 踩过的坑:查询需要.setSize(),否则默认返回10条
复杂的聚合查询:
// Rolls one metric table up over a trailing time window: groups documents by
// metricId and appends one HistoryDataVo per metric (avg/max/min of
// currentValue) to `ls`, stamped with a rounded save time.
// NOTE(review): the query filters on "metricTypes" while the mapping built
// elsewhere in this file spells the field "metircTypes" — confirm which name
// the indexed documents actually carry.
public static void getAttributeDataOneTable(String table,List<HistoryDataVo> ls) throws JsonParseException, JsonMappingException, IOException
{
long time=System.currentTimeMillis();
long saveTime=System.currentTimeMillis();
Client client = ESUtils.getClient();
// Window and rounding mode depend on the table's granularity:
// raw alarm tables -> last 10 min, "min" tables -> last 1 h, else last 1 day.
// (The "1"/"2"/"3" rounding codes are defined by timeUtil — see that class.)
if(table.indexOf("alarm_attribute_data")>-1)
{
time=timeUtil.get10minBeforTime();
saveTime=timeUtil.stringToLong2(timeUtil.dateToStringRound(new Date(time),"3"));
}
else if(table.indexOf("min")>-1)
{
time=timeUtil.get1hourBeforTime();
saveTime=timeUtil.stringToLong2(timeUtil.dateToStringRound(new Date(time),"1"));
}
else
{
time=timeUtil.get1dayBeforTime();
saveTime=timeUtil.stringToLong2(timeUtil.dateToStringRound(new Date(time),"2"));
}
// Only "capability" metrics newer than the window start are aggregated.
QueryBuilder qb = new BoolQueryBuilder()
.must(QueryBuilders.rangeQuery("createTime").gte(time))
.must(QueryBuilders.matchQuery("metricTypes","capability"));
// terms agg on metricId with avg/max/min sub-aggs over currentValue;
// setSize(0) suppresses the hits themselves — only aggregations are needed.
SearchResponse search = client.prepareSearch("cmsweb").setTypes(table)
.setQuery(qb).addAggregation(AggregationBuilders.terms("metricIds")
.field("metricId").subAggregation(AggregationBuilders.avg("avgCurrentValue").field("currentValue"))
.subAggregation(AggregationBuilders.max("maxCurrentValue").field("currentValue"))
.subAggregation(AggregationBuilders.min("minCurrentValue").field("currentValue")))// a size(...) on the terms agg here would cap the number of buckets returned
.setSize(0).get();
Map<String, Aggregation> aggMap =search.getAggregations().getAsMap();
StringTerms teams = (StringTerms) aggMap.get("metricIds");
for (Terms.Bucket teamBucket : teams.getBuckets()) {
// metric id for this bucket
String metricId = (String) teamBucket.getKey();
Map<String, Aggregation> subAggMap = teamBucket.getAggregations().getAsMap();
InternalMax maxAge = (InternalMax)subAggMap.get("maxCurrentValue");
InternalAvg avgAges = (InternalAvg)subAggMap.get("avgCurrentValue");
InternalMin minSalary = (InternalMin)subAggMap.get("minCurrentValue");
double maxAgeValue = maxAge.getValue();
double minValue = minSalary.getValue();
double avgValues = avgAges.getValue();
HistoryDataVo vo=new HistoryDataVo();
vo.setMaxCurrentValue(maxAgeValue);
vo.setMinCurrentValue(minValue);
vo.setCurrentValue(avgValues);
vo.setCreateTime(saveTime);
vo.setMetricId(metricId);
// assumes metricId embeds the numeric resource id — TODO confirm getIntfromString
vo.setResId(Long.valueOf(getIntfromString(metricId)));
vo.setMetricTypes("capability");
System.out.println(metricId + " " + maxAgeValue + " " + minValue+" "+avgValues);
ls.add(vo);
}
// client.close();
}
普通查询:
public static Map<String,List<HistoryDataVo>> getDatReportList(String dataTable, String metrId, String startTime, String endTime,String deviceIds) throws JsonParseException, JsonMappingException, IOException{
Map<String,List<HistoryDataVo>> dateMap=new HashMap<String, List<HistoryDataVo>>();
String resId[]=deviceIds.split(",");
Client client = ESUtils.getClient();
long startTimes=timeUtil.stringToLong2(startTime);
long endTimes=timeUtil.stringToLong2(endTime);
for(int i=0;i<resId.length;i++){
String metricId=resId[i]+metrId;
SortBuilder sortBuilder = SortBuilders.fieldSort("createTime")
.order(SortOrder.ASC).ignoreUnmapped(true);
QueryBuilder qb = new BoolQueryBuilder()
.must(QueryBuilders.termQuery("metricId",metricId))
.must(QueryBuilders.rangeQuery("createTime").gte(startTimes))
.must(QueryBuilders.termQuery("resId",resId[i]))
.must(QueryBuilders.rangeQuery("createTime").lte(endTimes));
SearchResponse response = client.prepareSearch("cmsweb").setTypes(dataTable).setQuery(qb).setSize(1000).addSort(sortBuilder).execute()
.actionGet();
SearchHit[] hits = response.getHits().getHits();
List<HistoryDataVo> esMap = new ArrayList<HistoryDataVo>();
for (SearchHit hit : hits) {
HistoryDataVo map = mapper.readValue(hit.getSourceAsString(), HistoryDataVo.class);
esMap.add(map);
}
dateMap.put(resId[i],esMap);
}
// client.close();
return dateMap;