Flink CDC实现一个Job同步多个表

直接使用Flink CDC SQL的写法,一个Job只能同步一个表的数据,至于原因,在此不再赘述。

直接上代码吧

第一步,自定义 DebeziumDeserializationSchema

将SourceRecord类转化为自定义的JsonRecord类型

public class JsonStringDebeziumDeserializationSchema
        implements DebeziumDeserializationSchema<JsonRecord> {

    @Override
    public void deserialize(SourceRecord record, Collector<JsonRecord> out) throws Exception {
        Envelope.Operation op = Envelope.operationFor(record);
        Struct value = (Struct) record.value();
        Schema valueSchema = record.valueSchema();
        String tableName = record.topic();
        //out.collect("source table name is :" + tableName);
        if (op == Envelope.Operation.CREATE || op == Envelope.Operation.READ) {
            String insertMapString = extractAfterRow(value, valueSchema);
            JsonRecord jsonRecord = new JsonRecord(tableName, "i", insertMapString);
            out.collect(jsonRecord);
        } else if (op == Envelope.Operation.DELETE) {
            String deleteString = extractBeforeRow(value, valueSchema);
            JsonRecord jsonRecord = new JsonRecord(tableName, "d", deleteString);
            out.collect(jsonRecord);
        } else if (op == Envelope.Operation.UPDATE) {
            String updateString = extractAfterRow(value, valueSchema);
            JsonRecord jsonRecord = new JsonRecord(tableName, "u", updateString);
            out.collect(jsonRecord);
        }
    }

    @Override
    public TypeInformation<JsonRecord> getProducedType() {
        return TypeInformation.of(new TypeHint<JsonRecord>(){});
    }


    private String extractAfterRow(Struct value, Schema valueSchema) throws Exception {
        Struct after = value.getStruct(Envelope.FieldName.AFTER);
        Schema afterSchema = valueSchema.field(Envelope.FieldName.AFTER).schema();
        Map<String, Object> map = getRowMap(after, afterSchema);
        ObjectMapper objectMapper = new ObjectMapper();
        return  objectMapper.writeValueAsString(map);
    }

    private String extractBeforeRow(Struct value, Schema valueSchema)
            throws Exception {
        Struct beforeValue = value.getStruct(Envelope.FieldName.BEFORE);
        Schema beforeSchema = valueSchema.field(Envelope.FieldName.BEFORE).schema();
        Map<String, Object> map =  getRowMap(beforeValue, beforeSchema);
        ObjectMapper objectMapper = new ObjectMapper();
        return  objectMapper.writeValueAsString(map);
    }

    private Map<String, Object> getRowMap(Struct value, Schema valueSchema) {
        Map<String, Object> map = new HashMap<>();
        for (Field field : valueSchema.fields()) {
            map.put(field.name(), value.get(field.name()));
        }
        return map;
    }

JsonRecord类定义如下:

/**
 * A single row-level change event, produced by JsonStringDebeziumDeserializationSchema.
 *
 * <p>FIX: {@code @Data} alone does not generate the 3-argument constructor that the
 * deserializer calls ({@code new JsonRecord(tableName, op, fieldValue)}); add
 * {@code @AllArgsConstructor}. {@code @NoArgsConstructor} keeps the class a valid
 * Flink POJO for serialization.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class JsonRecord {
    // Fully-qualified source table name (taken from the Debezium record topic).
    private String tableName;
    // Operation flag: "i" = insert/snapshot read, "u" = update, "d" = delete.
    private String op;
    // JSON-serialized map of column name -> value for the row image.
    private String fieldValue;
}

其中fieldValue为字段map序列化后的字符串

第二步,构建DebeziumSourceFunction

/**
 * Builds a Debezium-based Oracle {@link DebeziumSourceFunction} from an
 * {@link OracleConnectionOption}, supporting both LogMiner and XStream modes.
 */
public class OracleDebeziumFunctionBuilder {

    /**
     * @param option connection settings; {@code option.getTableNames()} must be non-null
     * @return a source function emitting {@link JsonRecord}s for all configured tables
     */
    public DebeziumSourceFunction build(OracleConnectionOption option) {
        OracleSource.Builder builder = OracleSource.builder();
        builder.hostname(option.getHostName());
        builder.port(option.getPort());
        builder.username(option.getUserName());
        builder.password(option.getPassword());
        builder.database(option.getDatabaseName());

        // Debezium expects fully-qualified table names: <schema>.<table>.
        List<String> tableNames = option.getTableNames();
        String[] tableArray = new String[tableNames.size()];
        for (int i = 0; i < tableArray.length; i++) {
            tableArray[i] = option.getSchemaName() + "." + tableNames.get(i);
        }
        builder.tableList(tableArray);
        builder.schemaList(new String[]{option.getSchemaName()});

        Properties dbzProperties = new Properties();
        dbzProperties.setProperty("database.tablename.case.insensitive", "false");
        if (option.isUseLogmine()) {
            // LogMiner mode: resolve the schema from the online catalog and mine
            // the redo logs continuously.
            dbzProperties.setProperty("log.mining.strategy", "online_catalog");
            dbzProperties.setProperty("log.mining.continuous.mine", "true");
        } else {
            // XStream mode. FIX: the Debezium property key is
            // "database.connection.adapter" — the original "adpter" typo meant the
            // adapter setting was silently ignored.
            dbzProperties.setProperty("database.connection.adapter", "xstream");
            dbzProperties.setProperty("database.out.server.name", option.getOutServerName());
        }
        builder.debeziumProperties(dbzProperties);
        builder.deserializer(new JsonStringDebeziumDeserializationSchema());
        builder.startupOptions(option.getStartupOption());
        return builder.build();
    }
}
OracleConnectionOption类定义如下:
/**
 * Connection and behavior options for the Oracle CDC source.
 *
 * <p>FIX: the class declared private fields with no accessors, yet
 * {@code OracleDebeziumFunctionBuilder} and {@code CdcStartup} call
 * {@code getHostName()}, {@code setPort(...)}, etc. — add {@code @Data}
 * (consistent with {@code JsonRecord}) to generate them.
 */
@Data
public class OracleConnectionOption {

    private String hostName;
    private int port;
    private String databaseName;
    private String userName;
    private String password;

    // Whether to use the LogMiner adapter; when false, XStream is used and
    // outServerName must be set.
    private boolean useLogmine;

    // XStream outbound server name (only used when useLogmine is false).
    private String outServerName;

    // Bare table names (no schema prefix); the builder prepends schemaName.
    private List<String> tableNames;

    private String schemaName;

    // Flink CDC startup mode, e.g. StartupOptions.initial().
    private StartupOptions startupOption;
}

第三步,编写main函数

通过OutputTag实现分流

/**
 * Entry point: reads change events for multiple Oracle tables through a single
 * Debezium source and fans them out to one sink per table via side outputs.
 */
public class CdcStartup {
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.disableOperatorChaining();

        OracleConnectionOption connectionOption = new OracleConnectionOption();
        connectionOption.setHostName(...);
        connectionOption.setPort(...);
        connectionOption.setDatabaseName(...);
        connectionOption.setUserName(...);
        connectionOption.setPassword(...);
        connectionOption.setUseLogmine(...);
        connectionOption.setOutServerName(...);
        connectionOption.setSchemaName(...);
        connectionOption.setStartupOption(StartupOptions.initial());

        List<String> tableNames = new ArrayList<>();
        // Add the table names to synchronize (bare names; the builder adds the schema prefix).
        tableNames.add("");
        // FIX: the table list was never passed to the option, which made
        // OracleDebeziumFunctionBuilder NPE on option.getTableNames().
        connectionOption.setTableNames(tableNames);

        OracleDebeziumFunctionBuilder functionBuilder = new OracleDebeziumFunctionBuilder();
        DebeziumSourceFunction sourceFunction = functionBuilder.build(connectionOption);
        DataStreamSource<JsonRecord> dataStreamSource = env.addSource(sourceFunction);

        // One side-output tag per table so each table can get its own sink.
        Map<String, OutputTag<JsonRecord>> outputTagMap = new HashMap<>();
        for (String tableName : tableNames) {
            outputTagMap.put(tableName, new OutputTag<>(tableName, TypeInformation.of(JsonRecord.class)));
        }

        SingleOutputStreamOperator<Object> mainStream =
                dataStreamSource.process(new ProcessFunction<JsonRecord, Object>() {
                    @Override
                    public void processElement(JsonRecord value, Context ctx, Collector<Object> out)
                            throws Exception {
                        // The topic is "<server>.<schema>.<table>"; route by the bare table name.
                        int index = value.getTableName().lastIndexOf(".");
                        String originalName = value.getTableName().substring(index + 1);
                        ctx.output(outputTagMap.get(originalName), value);
                    }
                });

        for (String tableName : tableNames) {
            DataStream<JsonRecord> outputStream = mainStream.getSideOutput(outputTagMap.get(tableName));
            CustomSinkFunction sinkFunction = new CustomSinkFunction(); // custom sink
            outputStream.addSink(sinkFunction).name(tableName);
        }
        env.execute();
    }
}

  • 8
    点赞
  • 20
    收藏
    觉得还不错? 一键收藏
  • 4
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 4
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值