import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import java.nio.charset.StandardCharsets;
/**
 * Author: panghu
 * Date: 2022-05-29
 * Description: Custom sink that writes stream elements to HBase.
 */
public class _19SinkCustomtoHBase {

    /**
     * Entry point: builds a two-element stream ("hello", "world") and writes
     * each element to the HBase table "test" via a custom {@link RichSinkFunction}.
     *
     * @param args unused
     * @throws Exception propagated from Flink job execution or HBase I/O
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        DataStreamSource<String> streamSource = env.fromElements("hello", "world");
        streamSource.addSink(
                new RichSinkFunction<String>() {
                    // Fully qualified because Flink's Configuration is already
                    // imported and the simple names clash.
                    org.apache.hadoop.conf.Configuration configuration;
                    Connection connection;
                    // Parsed once and reused for every record instead of
                    // re-parsing the table name on each invoke().
                    final TableName tableName = TableName.valueOf("test");

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        super.open(parameters);
                        // HBase connection parameters (ZooKeeper quorum).
                        configuration = HBaseConfiguration.create();
                        configuration.set("hbase.zookeeper.quorum", "hadoop102:2181");
                        connection = ConnectionFactory.createConnection(configuration);
                    }

                    @Override
                    public void close() throws Exception {
                        super.close();
                        // Guard against NPE when open() failed before the
                        // connection was established.
                        if (connection != null) {
                            connection.close();
                        }
                    }

                    @Override
                    public void invoke(String value, Context context) throws Exception {
                        // try-with-resources guarantees the Table is released
                        // even when put() throws (the original leaked it).
                        try (Table tb = connection.getTable(tableName)) {
                            Put put = new Put("rowkey".getBytes(StandardCharsets.UTF_8)); // fixed rowkey
                            // NOTE(review): every record targets the SAME rowkey and uses the
                            // element itself as the column qualifier, so each element creates a
                            // distinct cell info:<value> = "1". Confirm this layout is intended.
                            put.addColumn(
                                    "info".getBytes(StandardCharsets.UTF_8),  // column family
                                    value.getBytes(StandardCharsets.UTF_8),  // column qualifier
                                    "1".getBytes(StandardCharsets.UTF_8)     // cell value
                            );
                            tb.put(put);
                        }
                    }
                }
        );
        env.execute();
    }
}
// (Non-code residue from the web page this file was copied from; commented
// out so the file compiles.)
// Article title: "Flink custom Sink writing to HBase"
// Latest recommended article published 2024-01-26 18:26:28