1、批量查询
@Test
public void testMultiSearch() {
    // Fetch documents from two indices in a single multi-get round trip.
    MultiGetResponse multiGetResponse = client.prepareMultiGet()
            .add("index1", "blog", "1", "2")
            .add("my-index", "persion", "1", "2", "2")
            .get();
    // Print the source of every item that was actually found; skip items
    // whose response is missing or whose document does not exist.
    for (MultiGetItemResponse item : multiGetResponse) {
        GetResponse getResponse = item.getResponse();
        if (getResponse == null || !getResponse.isExists()) {
            continue;
        }
        System.out.println(getResponse.getSourceAsString());
    }
}
也可以用迭代器遍历数据(下例中的 multiGetItemResponses 即上文 prepareMultiGet 返回的 MultiGetResponse,对应上面的 mgResponse):
// Alternative traversal: walk the multi-get items with an explicit Iterator.
// NOTE(review): `multiGetItemResponses` is assumed to be the MultiGetResponse
// returned by client.prepareMultiGet()...get() — the example above names it
// `mgResponse`; confirm the intended variable name.
Iterator<MultiGetItemResponse> iterator = multiGetItemResponses.iterator();
while(iterator.hasNext()) {
MultiGetItemResponse next = iterator.next();
GetResponse response = next.getResponse();
// NOTE(review): unlike the loop above, this prints without a null/exists
// check — getResponse() is null for failed items, which would NPE here.
System.out.println(response.toString());
}
2、批量增加
@Test
public void testMultiPut() throws IOException {
    // Index two tweet documents in a single bulk round trip.
    BulkRequestBuilder bulkRequest = client.prepareBulk();
    bulkRequest.add(client.prepareIndex("twitter", "tweet", "1")
            .setSource(jsonBuilder()
                    .startObject()
                    .field("user", "kimchy")
                    .field("postDate", new Date())
                    .field("message", "message1")
                    .endObject()
            )
    );
    bulkRequest.add(client.prepareIndex("twitter", "tweet", "2")
            .setSource(jsonBuilder()
                    .startObject()
                    .field("user", "kimchy")
                    .field("postDate", new Date())
                    .field("message", "message2")
                    .endObject()
            )
    );
    // Execute the whole batch synchronously.
    BulkResponse bulkResponse = bulkRequest.get();
    System.out.println(bulkResponse.status());
    if (bulkResponse.hasFailures()) {
        System.out.println("存在失败操作");
        // Report each failed item individually (the original left this as a
        // TODO comment and only flagged that some failure existed).
        for (BulkItemResponse item : bulkResponse) {
            if (item.isFailed()) {
                System.out.println(item.getId() + ": " + item.getFailureMessage());
            }
        }
    }
}
批量插入方式二:
@Test
public void testBulk() throws IOException {
    // Document 1. startObject()/field()/endObject() all return the same
    // builder, so the original's alias variables (xcb, xcb2) were redundant.
    XContentBuilder doc1 = XContentFactory.jsonBuilder()
            .startObject()
            .field("name", "lucas")
            .field("age", "30")
            .field("gender", "nan")
            .endObject();
    // Document 2.
    XContentBuilder doc2 = XContentFactory.jsonBuilder()
            .startObject()
            .field("name", "jack")
            .field("age", "40")
            .field("gender", "nv")
            .endObject();
    BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
    bulkRequestBuilder.add(client.prepareIndex("pet", "cat", "1").setSource(doc1));
    bulkRequestBuilder.add(client.prepareIndex("pet", "cat", "2").setSource(doc2));
    BulkResponse bulkItemResponses = bulkRequestBuilder.get();
    // The original dropped the response unchecked; surface any item failures.
    if (bulkItemResponses.hasFailures()) {
        System.out.println(bulkItemResponses.buildFailureMessage());
    }
}
3、Bulk Processor(批量处理器)
BulkProcessor类提供了一个简单接口,可以根据请求的数量或大小自动刷新批量操作,也可以在给定的时间段之后自动刷新批量操作。
@Test
public void testBulkProcessor() {
    // BulkProcessor flushes queued requests automatically when any threshold
    // is hit: request count, payload size, or elapsed time.
    BulkProcessor bulkProcessor = BulkProcessor.builder(
            client,
            new BulkProcessor.Listener() {
                @Override
                public void beforeBulk(long executionId, BulkRequest request) {
                    // Invoked just before each bulk execution.
                    System.out.println("请求数:" + request.numberOfActions());
                }
                @Override
                public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
                    // Invoked after a bulk execution completes (per-item
                    // failures are reported through the response).
                    if (!response.hasFailures()) {
                        System.out.println("执行成功!");
                    } else {
                        System.out.println("执行失败!");
                    }
                }
                @Override
                public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
                    // Invoked when the bulk request itself failed, e.g. a
                    // transport-level error.
                    System.out.println(failure);
                }
            })
            .setBulkActions(1000)                               // flush after 1000 queued requests
            .setBulkSize(new ByteSizeValue(5, ByteSizeUnit.MB)) // flush after 5 MB of queued data
            .setFlushInterval(TimeValue.timeValueSeconds(5))    // flush every 5 seconds regardless
            .setConcurrentRequests(1)                           // allow 1 concurrent flush in the background
            // Retry backoff policy (NOT a rollback, as the original comment
            // claimed): exponential backoff starting at 100ms, up to 3 retries.
            .setBackoffPolicy(BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(100), 3))
            .build();
    // Add your requests
    bulkProcessor.add(new DeleteRequest("twitter", "tweet", "1"));
    bulkProcessor.add(new DeleteRequest("twitter", "tweet", "2"));
    // Flush any buffered requests now.
    bulkProcessor.flush();
    // Close the processor; remaining buffered requests are flushed.
    bulkProcessor.close();
    // Refresh the indices so the deletions become visible to search.
    client.admin().indices().prepareRefresh().get();
    // Now you can start searching!
    client.prepareSearch().get();
}
4、查询删除
@Test
public void testDeleteByQuery() {
    // Delete every document in "my-index" whose name field matches "王五".
    BulkByScrollResponse scrollResponse = DeleteByQueryAction.INSTANCE
            .newRequestBuilder(client)
            .filter(QueryBuilders.matchQuery("name", "王五"))
            .source("my-index") // index to operate on
            .get();
    // Report how many documents the query removed.
    System.out.println(scrollResponse.getDeleted());
}