过滤list集合
主要是记录list集合中有重复的,需过滤掉重复内容
方法一:
重写实体类的 hashCode 和 equals 方法(注意:这会改变该类所有 equals/hashCode 比较的行为,需确认代码中其他地方不依赖原有的比较逻辑)
@Override
public int hashCode() {
    // Dedup key: authorityId — must stay consistent with equals().
    Integer id = this.getAuthorityId();
    // Null-safe: the original `return this.getAuthorityId();` auto-unboxes
    // and throws NullPointerException when authorityId is null.
    return id == null ? 0 : id;
}
@Override
public boolean equals(Object o) {
    // Two AuthorityTree instances are equal iff their authorityId matches.
    if (this == o) {
        return true; // cheap identity short-circuit
    }
    if (!(o instanceof AuthorityTree)) {
        return false;
    }
    Integer otherId = ((AuthorityTree) o).getAuthorityId();
    Integer thisId = this.getAuthorityId();
    // Null-safe comparison: the original dereferenced the other object's id
    // directly and would throw NullPointerException when it is null.
    return otherId == null ? thisId == null : otherId.equals(thisId);
}
// Invocation: authTree is the List to deduplicate.
// NOTE: streams do not mutate the source — the collected result must be
// assigned back, otherwise the deduplicated list is silently discarded.
authTree = authTree.stream().distinct().collect(Collectors.toList());
方法二:
遍历集合,删除重复的
// Tracks authorityIds already seen; Set.add returns false for a repeat.
Set<Integer> seen = new HashSet<>();
// Keeps the first occurrence of each authorityId and drops every later
// duplicate — identical effect to the explicit Iterator/remove loop.
authTree.removeIf(node -> !seen.add(node.getAuthorityId()));
方法三:
使用 ConcurrentHashMap 写一个线程安全的去重工具方法,配合 Stream 的 filter 使用
/**
 * Returns a stateful predicate for Stream.filter that passes an element
 * only the first time its extracted key is seen (thread-safe).
 *
 * @param keyExtractor function producing the dedup key for each element
 * @param <T>          element type of the stream
 * @return predicate that is true exactly once per distinct key
 */
public static <T> Predicate<T> distinctByKey(Function<? super T, ?> keyExtractor) {
    // Concurrent set view; add() returns true only for a first-seen key.
    Set<Object> seen = ConcurrentHashMap.newKeySet();
    return element -> seen.add(keyExtractor.apply(element));
}
//使用方法:用Stream接口的 filter()接收为参数
list.stream().filter(distinctByKey(User::getId)).collect(Collectors.toList());
参考:https://www.cnblogs.com/unknows/p/13534953.html