import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
/**
 * Word-count demo backed by Flink keyed {@code ValueState}: reads comma-separated
 * words from a socket, keys by word, and keeps a running count per word in state.
 */
public class ValueStateDemo {
    public static void main(String[] args) throws Exception {
        // Checkpointing every 10s so the keyed state survives task failures.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(10000);

        // Socket source producing comma-separated word lists, one line at a time.
        DataStreamSource<String> lines = env.socketTextStream("linux01", 8888);

        // Tokenize each line into (word, 1) pairs.
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndOne =
                lines.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String line, Collector<Tuple2<String, Integer>> out) throws Exception {
                        for (String token : line.split(",")) {
                            out.collect(Tuple2.of(token, 1));
                        }
                    }
                });

        // Partition by the word itself so every word owns its own state slot.
        KeyedStream<Tuple2<String, Integer>, String> keyedWords =
                wordAndOne.keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
                    @Override
                    public String getKey(Tuple2<String, Integer> pair) throws Exception {
                        return pair.f0;
                    }
                });

        // Accumulate a per-word total in ValueState and emit the running count.
        SingleOutputStreamOperator<Tuple2<String, Integer>> counted =
                keyedWords.map(new RichMapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {
                    // Per-key running total; transient because state handles are not serialized.
                    private transient ValueState<Integer> countState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        // Register the state handle once per parallel task instance.
                        countState = getRuntimeContext().getState(
                                new ValueStateDescriptor<Integer>("value-state", Integer.class));
                    }

                    @Override
                    public Tuple2<String, Integer> map(Tuple2<String, Integer> input) throws Exception {
                        // State is null the first time a key is seen; treat that as zero.
                        Integer previous = countState.value();
                        Integer updated = (previous == null ? 0 : previous) + input.f1;
                        countState.update(updated);
                        input.f1 = updated;
                        return input;
                    }
                });

        counted.print();
        env.execute("ValueStateDemo");
    }
}
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
import javax.xml.validation.TypeInfoProvider;
/**
 * Activity-statistics demo: reads "uid,activityId,eventId" lines from a socket,
 * keys by (activityId, eventId), and delegates per-key aggregation to
 * {@link ActivityFun} (distinct-user count plus total event count).
 */
public class valueStateanli {
    public static void main(String[] args) throws Exception {
        // Checkpoint every 10s so ActivityFun's keyed state is fault-tolerant.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(10000);

        DataStreamSource<String> lines = env.socketTextStream("linux01", 8888);

        // Parse each CSV line into (uid, activityId, eventId).
        // NOTE(review): lines with fewer than 3 fields will fail the task — assumes well-formed input.
        SingleOutputStreamOperator<Tuple3<String, String, String>> parsed =
                lines.map(new MapFunction<String, Tuple3<String, String, String>>() {
                    @Override
                    public Tuple3<String, String, String> map(String line) throws Exception {
                        String[] parts = line.split(",");
                        return Tuple3.of(parts[0], parts[1], parts[2]);
                    }
                });

        // Key by the (activityId, eventId) pair so stats are kept per activity/event.
        KeyedStream<Tuple3<String, String, String>, Tuple2<String, String>> keyed =
                parsed.keyBy(new KeySelector<Tuple3<String, String, String>, Tuple2<String, String>>() {
                    @Override
                    public Tuple2<String, String> getKey(Tuple3<String, String, String> record) throws Exception {
                        return Tuple2.of(record.f1, record.f2);
                    }
                });

        SingleOutputStreamOperator<Tuple4<String, String, Integer, Integer>> stats =
                keyed.map(new ActivityFun());

        stats.print();
        env.execute("valueStateanli");
    }
}
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.configuration.Configuration;
import java.util.HashSet;
import java.util.Set;
/**
 * Keyed aggregation function: for each (activity, event) key it tracks the set of
 * distinct user ids and the total number of events, both in Flink {@code ValueState},
 * and emits (eventId, activityId, distinctUsers, totalEvents) per input record.
 *
 * <p>NOTE(review): the distinct-user set grows unboundedly per key — acceptable for a
 * demo, but a production job would want state TTL or an approximate structure.
 */
public class ActivityFun extends RichMapFunction<Tuple3<String, String, String>, Tuple4<String, String, Integer, Integer>> {
    // Distinct user ids seen for this key; drives the "people" count.
    private transient ValueState<Set<String>> userSetState;
    // Total event occurrences for this key; drives the "times" count.
    private transient ValueState<Integer> eventCountState;

    @Override
    public void open(Configuration parameters) throws Exception {
        // TypeHint is required so Flink can build serializers for the Set<String> state.
        ValueStateDescriptor<Set<String>> userSetDescriptor =
                new ValueStateDescriptor<>("renshu", TypeInformation.of(new TypeHint<Set<String>>() {
                }));
        userSetState = getRuntimeContext().getState(userSetDescriptor);

        ValueStateDescriptor<Integer> eventCountDescriptor =
                new ValueStateDescriptor<>("cishuState", Integer.class);
        eventCountState = getRuntimeContext().getState(eventCountDescriptor);
    }

    @Override
    public Tuple4<String, String, Integer, Integer> map(Tuple3<String, String, String> record) throws Exception {
        String userId = record.f0;

        // Add this user to the per-key distinct set (first sighting initializes it).
        Set<String> users = userSetState.value();
        if (users == null) {
            users = new HashSet<>();
        }
        users.add(userId);
        // Write back so state backends that serialize on update (e.g. RocksDB) see the change.
        userSetState.update(users);

        // Bump the per-key event counter (null means this is the first event).
        Integer total = eventCountState.value();
        total = (total == null ? 0 : total) + 1;
        eventCountState.update(total);

        // Output order: eventId, activityId, distinct users, total events.
        return Tuple4.of(record.f2, record.f1, users.size(), total);
    }
}