---------- Straight to the code ----------
package com.hx.test;
import com.hx.conf.HBaseConfig;
import com.hx.test.model.IdTimeTemperature;
import com.hx.utils.HBaseUtil;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.hbase.sink.HBaseSinkFunction;
import org.apache.flink.connector.hbase.sink.LegacyMutationConverter;
import org.apache.flink.connector.hbase.util.HBaseTableSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.types.Row;
import org.apache.hadoop.conf.Configuration;
import java.util.Arrays;
import static com.hx.utils.ExecutionEnvUtil.createParameterTool;
/**
 * Streaming job that reads "id,time,temperature" records from a local text
 * file and writes them into a Kerberos-secured HBase table through the legacy
 * Flink HBase sink connector.
 */
public class HbaseSink {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        // Restart the job at most twice, pausing 1000 ms between attempts.
        streamEnv.getConfig().setRestartStrategy(RestartStrategies.fixedDelayRestart(2, 1000));

        // JAAS / krb5 settings used by the Kerberos handshake (local dev paths).
        System.setProperty("java.security.auth.login.config", "D:\\workspace\\flink2doris\\src\\main\\resources\\kafka_client_jaas.conf");
        System.setProperty("java.security.krb5.conf", "D:\\workspace\\flink2doris\\src\\main\\resources\\krb5.ini");
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");

        // Each input line is "id,time,temperature"; skip empty lines, split on comma.
        DataStream<String> rawLines = streamEnv.readTextFile("src/main/resources/id-time-temperature");
        SingleOutputStreamOperator<Tuple3<String, String, String>> parsed = rawLines
                .filter(line -> line.length() > 0)
                .map(line -> {
                    String[] fields = line.split(",");
                    return Tuple3.of(fields[0], fields[1], fields[2]);
                })
                .returns(Types.TUPLE(Types.STRING, Types.STRING, Types.STRING));

        // Zookeeper connection details come from the external parameter file.
        ParameterTool params = createParameterTool();
        HBaseConfig hbaseConfig = HBaseConfig.builder()
                .zookeeper_quorum(params.getRequired("hbase.zookeeper.quorum"))
                .zookeeper_port(params.get("hbase.zookeeper.port", "2181"))
                .zookeeper_znode("/hbase")
                .isKerberos(true)
                .build();
        Configuration hadoopConf = HBaseUtil.initHBaseClientConfig(hbaseConfig);

        // Row key plus one "cf1" column per field declared on IdTimeTemperature.
        HBaseTableSchema tableSchema = new HBaseTableSchema();
        tableSchema.setRowKey("rowKey", String.class);
        for (String column : IdTimeTemperature.schemas) {
            tableSchema.addColumn("cf1", column, String.class);
        }

        HBaseSinkFunction<Tuple2<Boolean, Row>> hbaseSink = new HBaseSinkFunction<Tuple2<Boolean, Row>>(
                "testzs:test",
                hadoopConf,
                new LegacyMutationConverter(tableSchema),
                10 * 1024,   // buffer flush max size in bytes
                10 * 1024,   // buffer flush max mutations
                1000         // buffer flush interval in ms
        );

        DataStream<Tuple2<Boolean, Row>> hbaseRecords = parsed
                .map(new DataToHbaseTypeFuncation())
                .name("format vehicle trip").uid("format vehicle trip")
                .rebalance();
        hbaseRecords.addSink(hbaseSink).name("sink to hbase").uid("sink to hbase");

        streamEnv.execute();
    }

    /**
     * Converts a parsed (id, time, temperature) triple into the
     * (upsert-flag, Row) shape the HBase sink expects: the outer row carries
     * the row key ("id_time") and a nested row with the cf1 column values.
     */
    public static class DataToHbaseTypeFuncation extends RichMapFunction<Tuple3<String, String, String>, Tuple2<Boolean, Row>> {
        @Override
        public Tuple2<Boolean, Row> map(Tuple3<String, String, String> value) throws Exception {
            Row columns = Row.of(value.f0, value.f1, value.f2);
            return Tuple2.of(true, Row.of(value.f0 + "_" + value.f1, columns));
        }
    }
}
---------------------------------------------------------------------
package com.hx.utils;
import com.hx.conf.HBaseConfig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.security.UserGroupInformation;
/**
 * Helpers for building Kerberos-enabled HBase client configurations and
 * plain (unauthenticated) HBase connections.
 */
public class HBaseUtil {

    /**
     * Builds an HBase client {@link Configuration} and performs a keytab-based
     * Kerberos login with it.
     *
     * @param hBaseConfig connection settings (zookeeper quorum, port, znode)
     * @return a Kerberos-authenticated HBase client configuration
     * @throws Exception if the keytab login fails
     */
    public static Configuration initHBaseClientConfig(HBaseConfig hBaseConfig) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Use the quorum supplied by the caller; the original read this value
        // into a local and then ignored it in favour of a hard-coded host list.
        conf.set("hbase.zookeeper.quorum", hBaseConfig.getZookeeper_quorum());
        // TODO(review): port and znode are still hard-coded; HBaseConfig is
        // built with zookeeper_port / zookeeper_znode values that should be
        // wired through here as well (getter names need confirming).
        conf.set(HConstants.ZOOKEEPER_CLIENT_PORT, "2181");
        conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/hbase");

        // HBase authentication is Kerberos; RPC traffic is encrypted ("privacy").
        conf.set("hadoop.security.authentication", "kerberos");
        conf.set("hbase.security.authentication", "kerberos");
        conf.set("hbase.rpc.protection", "privacy");

        // Kerberos principals and keytab. NOTE(review): local dev paths and the
        // principal/realm are hard-coded — these belong in HBaseConfig too.
        conf.set("keytab.file", "D:/soft/kerbros/hbase.keytab");
        conf.set("hbase.master.kerberos.principal", "hbase/_HOST@hx.TC");
        conf.set("hbase.regionserver.kerberos.principal", "hbase/_HOST@hx.TC");
        conf.set("kerberos.principal", "hbase/cdp-master01.hx.tc@hx.TC");

        // Register the configuration with Hadoop security, then log in from the keytab.
        UserGroupInformation.setConfiguration(conf);
        System.out.println("--------------> 开始进行keytab认证 <--------------");
        UserGroupInformation.loginUserFromKeytab("hbase/cdp-master01.hx.tc@hx.TC", "D:/soft/kerbros/hbase.keytab");
        System.out.println("--------------> keytab认证结束 <--------------");
        return conf;
    }

    /**
     * Opens a plain (non-Kerberos) HBase connection.
     *
     * @param zkQuorum zookeeper hosts, comma-separated
     * @param port     zookeeper client port
     * @return an open connection; the caller is responsible for closing it
     * @throws Exception if the connection cannot be established
     */
    public static Connection getConnection(String zkQuorum, int port) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", zkQuorum);
        conf.set("hbase.zookeeper.property.clientPort", String.valueOf(port));
        return ConnectionFactory.createConnection(conf);
    }

    /**
     * Derives a Kerberos realm from a zookeeper quorum string by upper-casing
     * the domain part of the first host.
     * <p>
     * Example: "hadoopqa01nn01.jqdev.shanghaigm.com,..." returns
     * "JQDEV.SHANGHAIGM.COM". If the first host contains no dot, the whole
     * host name (upper-cased) is returned.
     *
     * @param hosts comma-separated host names
     * @return the derived realm
     */
    public static String getRealm(String hosts) {
        // The first host's domain (everything after the first dot) is the realm.
        String firstHost = hosts.split(",")[0];
        return firstHost.substring(firstHost.indexOf(".") + 1).toUpperCase();
    }

    public static void main(String[] args) {
        String hosts = "jqdev-l-02479.jqdev.shanghaigm.com,jqdev-l-02481.jqdev.shanghaigm.com,jqdev-l-02480.jqdev.shanghaigm.com";
        String realm = getRealm(hosts);
        System.out.println(realm);
    }
}
---------------------------- pom file: same as the previous article (reading from HBase) ----------------------------