import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * Demo: split a large list into chunks and process each chunk concurrently.
 * Intended to be combined with paged queries for large data sets.
 *
 * @author jiangli
 * @date 2019/11/18 19:37
 */
public class ThreadDemo {
    public static void main(String[] args) {
        // Fetch the raw data.
        List<User> users = getUsers();
        // Split the data into chunks of at most 2 elements each.
        List<List<User>> lists = ListUtils.splitList(users, 2);
        // Process each chunk on a worker. An ExecutorService bounds thread
        // creation and gives us an orderly shutdown, instead of spawning one
        // unmanaged raw Thread per chunk.
        ExecutorService pool = Executors.newFixedThreadPool(Math.max(1, lists.size()));
        try {
            for (List<User> list : lists) {
                // UserThread extends Thread, so it is also a Runnable.
                pool.execute(new UserThread(list));
            }
        } finally {
            // Reject new tasks; already-submitted tasks still run to completion.
            pool.shutdown();
        }
    }

    /**
     * Builds 11 sample users with ids 0..10 and names "java_&lt;id&gt;".
     *
     * @return a mutable list of demo users
     */
    private static List<User> getUsers() {
        List<User> list = new ArrayList<>();
        for (int i = 0; i < 11; i++) {
            list.add(new User(i, "java_" + i));
        }
        return list;
    }
}
/**
 * Worker thread that handles one chunk of users.
 * For every user in its chunk it prints the current thread id, the user id
 * and the user name, then would run the per-user business logic.
 */
class UserThread extends Thread {

    // The chunk of users this worker is responsible for; assigned once.
    private final List<User> users;

    public UserThread(List<User> users) {
        this.users = users;
    }

    @Override
    public void run() {
        users.forEach(user -> {
            System.out.println(Thread.currentThread().getId()
                    + ", userId: " + user.getId()
                    + " , userName: " + user.getName());
            // Per-user business logic goes here...
        });
    }
}
Processing large volumes of data page by page (pagination):
/**
 * Imports all on-sale SPUs into Elasticsearch, page by page.
 *
 * Creates the index and mapping once, then repeatedly fetches one page of
 * SPUs, converts each to a Goods document and bulk-saves the batch, until a
 * short (or empty) page signals the end of the data.
 */
@Test
public void importDataToEs() {
    // Create the index and its mapping before writing any documents.
    elasticsearchTemplate.createIndex(Goods.class);
    elasticsearchTemplate.putMapping(Goods.class);

    // Keep the requested page size constant; track the fetched count
    // separately. (The original reused one variable for both, so the page
    // size passed to the query depended on the previous iteration.)
    final int pageSize = 100;
    int page = 1;
    int fetched;
    do {
        // Fetch one page of on-sale SPUs.
        PageResult<SpuDTO> spus = goodsApiFeign.querySpuByPage(null, true, page, pageSize);
        List<SpuDTO> items = spus.getItems();
        if (items == null || items.isEmpty()) {
            break;
        }
        // spu ==> goods. Skip items that fail to convert instead of
        // persisting an empty placeholder document (the original saved a
        // blank new Goods() whenever buildGoods threw).
        List<Goods> goodsList = new ArrayList<>(items.size());
        for (SpuDTO spu : items) {
            try {
                goodsList.add(searchService.buildGoods(spu));
            } catch (Exception e) {
                // One bad SPU must not abort the whole import; record it and move on.
                e.printStackTrace();
            }
        }
        // Bulk-save the successfully converted batch.
        if (!goodsList.isEmpty()) {
            goodsRepository.saveAll(goodsList);
        }
        page++;
        fetched = items.size();
        // A full page may be followed by more data; a short page is the last.
    } while (fetched == pageSize);
}