直接上代码
import com.alibaba.fastjson.JSON;
import com.tc.flink.analysis.label.bean.output.ItemIdWithAction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.*;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import java.util.HashMap;
import java.util.Map;
public class CityItemIdClickedState extends RichMapFunction<Tuple2<ItemIdWithAction, Integer>, Tuple2<String, String>> {
private transient MapState<ItemIdWithAction, Integer> map;
public transient static final String CLICK_PREFIX_KEY = "cityClicked";
public transient static final String CREATE_PREFIX_KEY = "cityCreated";
@Override
public void open(Configuration parameters) throws Exception {
StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.minutes(60 * 2)).setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite).setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired).build();
MapStateDescriptor<ItemIdWithAction, Integer> descriptor = new MapStateDescriptor<ItemIdWithAction, Integer>("paln_click_num", ItemIdWithAction.class, Integer.class);
descriptor.enableTimeToLive(ttlConfig);
map = getRuntimeContext().getMapState(descriptor);
super.open(parameters);
}
@Override
public Tuple2<String, String> map(Tuple2<ItemIdWithAction, Integer> keyValue) throws Exception {
Integer num = keyValue.f1;
ItemIdWithAction itemIdWithAction = keyValue.f0;
if (num.equals(map.get(itemIdWithAction))) {
return Tuple2.of(null, null);
}
map.put(itemIdWithAction, num);
String prefixKey = itemIdWithAction.getAction().equals("click") ? CLICK_PREFIX_KEY : CREATE_PREFIX_KEY;
String key = String.format("%s@%s@%s", prefixKey, itemIdWithAction.getStartCityId(), itemIdWithAction.getEndCityId());
Map<String, Integer> valueMap = new HashMap<String, Integer>();
for (ItemIdWithAction tmp : map.keys()) { //报错的异常点
valueMap.put(tmp.getItemId(), map.get(tmp));
}
return Tuple2.of(key, JSON.toJSONString(valueMap));
}
}
map是一条条处理,每次取所有MapState数据输出。
本地跑,集群跑都没问题,但当流量大MapState过大时候,就报如下错误,每天报错一两次,重启。
java.util.ConcurrentModificationException
at java.util.HashMap$HashIterator.nextNode(HashMap.java:1442)
at java.util.HashMap$EntryIterator.next(HashMap.java:1476)
at java.util.HashMap$EntryIterator.next(HashMap.java:1474)
at org.apache.flink.runtime.state.ttl.TtlMapState$EntriesIterator.hasNext(TtlMapState.java:161)
at com.tc.flink.operator.state.CityItemIdClickedState.map(CityItemIdClickedState.java:42)
at com.tc.flink.operator.state.CityItemIdClickedState.map(CityItemIdClickedState.java:14)
at org.apache.flink.streaming.api.operators.StreamMap.processElement(StreamMap.java:41)
at org.apache.flink.streaming.runtime.io.StreamInputProcessor.processInput(StreamInputProcessor.java:202)
at org.apache.flink.streaming.runtime.tasks.OneInputStreamTask.run(OneInputStreamTask.java:105)
at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:302)
at org.apache.flink.runtime.taskmanager.Task.run(Task.java:711)
at java.lang.Thread.run(Thread.java:748)
报错原因在于 for (ItemIdWithAction tmp : map.keys())
mapstate被并发修改了。
比较奇怪的是map-function单线程处理,为什么出现ConcurrentModificationException
查看TtlMapState源码
originalIterator::remove
是剔除动作。有点类似redis,当再次访问时候,才会触发剔除(有可能产生内存泄漏)。
但是map-funciton是key-by下单线程操作,为什么会出现并发问题。
再看TtlStateFactory类
确实异步删除,所以mapstate过大的时候,就会出现这种问题。
修改代码
Iterator< Map.Entry<ItemAction,Integer>> mapIterator= map.iterator();
while(mapIterator.hasNext()){
Map.Entry<ItemIdWithAction,Integer> entry= mapIterator.next();
valueMap.put(entry.getKey().getItemId(),entry.getValue());
}
犯了低级错误,不过也细读了源码