在pgsql里执行以下语句插入一条记录
-- Insert one fully-populated row covering every column type of public.table_name.
INSERT INTO "public"."table_name" (
    "id_c",
    "json_c",
    "uuid_c",
    "xml_c",
    "int_c",
    "money_c",
    "jsonb_c",
    "numeric_c",
    "boolean_c",
    "date_c",
    "timestamp_c",
    "timestamptz_c",
    "time_c",
    "text_c",
    "char_c",
    "varchar_c"
) VALUES (
    'has_value',
    '{"id_c":"null_value11","json_c":null,"uuid_c":null,"xml_c":null,"int_c":null,"money_c":null,"jsonb_c":null,"numeric_c":null,"boolean_c":null,"date_c":null,"timestamp_c":null,"timestamptz_c":null,"time_c":null,"text_c":null,"char_c":null,"varchar_c":null}',
    '00000000-0000-0000-0000-000000000000',
    '<xml><node>abc</node></xml>',
    3,
    '3.14',
    '{"testfield": "jsonb_test"}',
    1.23456789,
    true,
    '2020-01-06',
    '2019-01-06 13:37:16.000000',
    '2021-01-06 13:37:25.404000',
    '06:18:21',
    'a very long test',
    'c',
    'vc'
);
然后flink中获取变更后的数据
/**
 * Consumes Debezium change events for postgres.public.table_name from Kafka
 * and prints the "after" image of each event.
 */
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Kafka connection settings for the Debezium topic.
    Properties kafkaProps = new Properties();
    kafkaProps.setProperty("bootstrap.servers", "192.168.1.191:9092");
    kafkaProps.setProperty("group.id", "flink");

    FlinkKafkaConsumer010<String> debeziumSource =
            new FlinkKafkaConsumer010<>("postgres.public.table_name", new SimpleStringSchema(), kafkaProps);
    // debeziumSource.setStartFromEarliest();

    DataStream<String> changeEvents = env.addSource(debeziumSource);

    // Crude extraction of the "after" payload: take the substring between the
    // "after" key and the following "source" key of the raw Debezium JSON.
    changeEvents
            .map(event -> StringUtils.substringBetween(event, "\"after\":", ",\"source\":"))
            .print();

    // execute program
    env.execute("Debezium");
}
注意:上面注释掉的 setStartFromEarliest 并不能让代码消费到 kafka 对应 topic 里建立这个 group 之前的消息;但是对于这个 group 之前已经消费过的消息,仍然可以重新消费一遍。
打印出来的json格式化之后如下
{
"id_c": "has_value",
"json_c": "{\"id_c\":\"null_value11\",\"json_c\":null,\"uuid_c\":null,\"xml_c\":null,\"int_c\":null,\"money_c\":null,\"jsonb_c\":null,\"numeric_c\":null,\"boolean_c\":null,\"date_c\":null,\"timestamp_c\":null,\"timestamptz_c\":null,\"time_c\":null,\"text_c\":null,\"char_c\":null,\"varchar_c\":null}",
"uuid_c": "00000000-0000-0000-0000-000000000000",
"xml_c": "<xml><node>abc</node></xml>",
"int_c": 3,
"money_c": "ATo=",
"jsonb_c": "{\"testfield\": \"jsonb_test\"}",
"numeric_c": {
"scale": 8,
"value": "B1vNFQ=="
},
"boolean_c": true,
"date_c": 18267,
"timestamp_c": 1546781836000000,
"timestamptz_c": "2021-01-06T13:37:25.404Z",
"time_c": 22701000000,
"text_c": "a very long test",
"char_c": "c ",
"varchar_c": "vc"
}
- money_c
  这个是存储的内部值,实际上我存的是3.14,不知道这个还能还原不
- numeric_c
  这个看上去也是内部值,看上去也无法还原
- date_c
  这个是距离1970-01-01的天数,可以通过下面java代码看出来 System.out.println(LocalDate.now().minusDays(18267));
- char_c
  注意这个有补全