// Flink将数据落地Hbase — Flink job that sinks data into HBase.
// Flink主类 — main entry class for the Flink application.
package flink.sink2hbase;
import flink.sink2hbase.deserialization.JsonDeserializationSchema;
import flink.sink.HbaseSinkFunction;
import flink.sink2hbase.map.HTableBaseMap;
import flink.sink2hbase.table.UserHTable;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import flink.sink2hbase.pojo.User;
import util.BeanUtil;
import util.Property;
import java.util.Properties;
public class FlinkSinkHbase {
private static OutputTag<UserHTable> userOutputTag = new OutputTag<>("用户表", TypeInformation.of(UserHTable.class));
public static void main(String[] args) throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
Properties prop = Property.getKafkaProperties();
prop.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
prop.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG,"g1");
FlinkKafkaConsumer<User> consumer = new FlinkKafkaConsumer<>("test",new JsonDeserializationSchema<>(User.class),prop);
DataStreamSource<User> mainStream = env.addSource(consumer);
SingleOutputStreamOperator<User> dataStream = mainStream
.process(new ProcessFunction<User, User>() {