es分页用from+size的方式超过1万条就查不出数据了。
查询大于1万条的数据时，这里使用scroll和go-elasticsearch简单写一个demo
package main
import (
"bytes"
"fmt"
"io"
"log"
"net/http"
"os"
"strconv"
"strings"
"sync"
"time"
"github.com/elastic/go-elasticsearch/v8"
"github.com/tidwall/gjson"
)
// c is the shared Elasticsearch client for this demo.
// NOTE(review): presumably populated exactly once via setElastic guarded by
// `once` — the initializer is not visible in this chunk, confirm below.
var c *elasticsearch.Client

// once guards one-time initialization of the package-level client.
var once sync.Once
func main() {
log.SetFlags(0) //这里设置的0即取消log格式化输出,输出的内容和使用fmt包下的println()格式一样
var (
batchNum int
scrollID string
)
es := setElastic([]string{"http://ip:9200"})
// Index 100 documents into the "test-scroll" index
//测试写入100个document
log.Println("Indexing the documents...")
for i := 1; i <= 100; i++ {
res, err := es.Index(
"test-scroll",
strings.NewReader(`{"title" : "test"}`),
es.Index.WithDocumentID(strconv.Itoa(i)),
)
if err != nil || res.IsError() {
log.Fatalf("Error: %s: %s", err, res)
}
}
es.Indices.Refresh(es.Indices.Refresh.WithIndex("test-scroll"))
// Perform the initial search request to get
// the first batch of data and the scroll ID
//
log.Println("Scrolling the index...")
log.Println(strings.Repeat("-", 80))
res, _ := es.Search(
es.Search.WithIndex("test-scroll"),
es.Search.WithSort("_doc"),
es.Search.WithSize(10),