准备流,实时批量写入 HBase:
package application_spark.hbase; import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.functions.sink.RichSinkFunction; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.*; import java.io.IOException; import java.util.List; public class HbaseSink extends RichSinkFunction<List<Put>> { private org.apache.hadoop.conf.Configuration configuration; private Connection connection = null; private BufferedMutator userMutator; private BufferedMutator scencesMutator; @Override public void open(Conf