使用 elasticsearch 对字段进行去重计数(cardinality 聚合),官方 REST API 的用法比较麻烦,这里使用开源框架 bboss 实现,代码如下,供有需要的读者参考。框架地址:https://github.com/bbossgroups/bboss
package org.frameworkset.elasticsearch.imp;
import org.frameworkset.elasticsearch.ElasticSearchHelper;
import org.frameworkset.elasticsearch.client.ClientInterface;
import org.frameworkset.spi.assemble.PropertiesContainer;
import com.alibaba.fastjson.JSONObject;
import com.frameworkset.common.poolman.SQLExecutor;
import com.frameworkset.common.poolman.util.SQLUtil;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
/**
 * Demo: schedules a daily job that (1) counts all documents in the "dbdemo"
 * Elasticsearch index, (2) runs the "termAgg" cardinality aggregation defined
 * in the bboss DSL mapper file, and (3) persists both numbers plus a timestamp
 * into the apclientcount table.
 */
public class Dbdemo {
	/**
	 * Entry point. Computes today's 14:44:00 as the first run time — pushed to
	 * the same time tomorrow if that moment has already passed — and schedules
	 * the collection task to repeat every 24 hours.
	 */
	public static void main(String[] args) {
		System.out.println("program is run");
		Calendar calendar = Calendar.getInstance();
		calendar.set(Calendar.HOUR_OF_DAY, 14); // first-run hour (24h clock)
		calendar.set(Calendar.MINUTE, 44);      // first-run minute
		calendar.set(Calendar.SECOND, 0);       // first-run second
		calendar.set(Calendar.MILLISECOND, 0);  // zero millis so the time is exact
		// FIX: if 14:44 has already passed today, Timer.scheduleAtFixedRate would
		// fire the task immediately; push the first execution to tomorrow instead.
		if (calendar.getTime().before(new Date())) {
			calendar.add(Calendar.DAY_OF_MONTH, 1);
		}
		Date firstRun = calendar.getTime();
		Timer timer = new Timer();
		// NOTE(review): a fixed 24h period drifts across DST transitions;
		// acceptable for a demo, use a cron-style scheduler in production.
		timer.scheduleAtFixedRate(new TimerTask() {
			public void run() {
				System.out.println("-------设定要指定任务--------");
				try {
					// Total document count of the "dbdemo" index.
					Dbdemo search = new Dbdemo();
					long count = search.testCountAll();
					System.out.println("record count :" + count);
					// Run the "termAgg" aggregation from the DSL mapper file.
					ClientInterface clientUtil =
							ElasticSearchHelper.getConfigRestClientUtil("estrace/ESTracesqlMapper.xml");
					Map<String, Object> params = null; // the DSL takes no parameters
					String response = clientUtil.executeRequest("dbdemo/_search", "termAgg", params);
					System.out.println(response);
					// Navigate aggregations.traces.value directly instead of
					// re-parsing intermediate JSON strings three times.
					JSONObject jsonObject = JSONObject.parseObject(response);
					int val = jsonObject.getJSONObject("aggregations")
							.getJSONObject("traces")
							.getIntValue("value");
					System.out.println(val);
					// FIX: "hh" is the 12-hour clock and produces wrong timestamps
					// after noon; "HH" gives the intended 24-hour format.
					SimpleDateFormat def = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
					String nowTime = def.format(new Date());
					System.out.println(nowTime);
					// Load DB connection settings from application.properties.
					PropertiesContainer propertiesContainer = new PropertiesContainer();
					propertiesContainer.addConfigPropertiesFile("application.properties");
					String dbName = propertiesContainer.getProperty("db.name");
					String dbUser = propertiesContainer.getProperty("db.user");
					String dbPassword = propertiesContainer.getProperty("db.password");
					String dbDriver = propertiesContainer.getProperty("db.driver");
					String dbUrl = propertiesContainer.getProperty("db.url");
					String validateSQL = propertiesContainer.getProperty("db.validateSQL");
					String _jdbcFetchSize = propertiesContainer.getProperty("db.jdbcFetchSize");
					Integer jdbcFetchSize = null;
					if (_jdbcFetchSize != null && !_jdbcFetchSize.isEmpty()) {
						jdbcFetchSize = Integer.parseInt(_jdbcFetchSize);
					}
					// Start the connection pool (datasource name, driver, url,
					// credentials, validation SQL, fetch size).
					SQLUtil.startPool(dbName,
							dbDriver,
							dbUrl,
							dbUser, dbPassword,
							validateSQL,
							jdbcFetchSize
					);
					try {
						SQLExecutor.insert(
								"insert into apclientcount (MacSum,Macs,RecordTime) values(?,?,?)",
								count, val, nowTime);
					} catch (Exception e) {
						// FIX: was an empty catch that silently lost insert failures.
						System.err.println("failed to persist counts: " + e);
						e.printStackTrace();
					}
				} catch (Exception e) {
					// FIX: was an empty catch; surface failures so the daily job
					// does not fail invisibly.
					System.err.println("scheduled collection failed: " + e);
					e.printStackTrace();
				}
			}
		}, firstRun, 1000L * 60 * 60 * 24); // repeat every 24 hours
	}

	/**
	 * Counts all documents in the "dbdemo" Elasticsearch index.
	 *
	 * @return the total document count reported by the cluster
	 */
	public long testCountAll() {
		ClientInterface clientInterface = ElasticSearchHelper.getRestClientUtil();
		return clientInterface.countAll("dbdemo");
	}
}