-- 设置发布为 true
-- Enable "publish all tables" on every existing publication.
-- NOTE(review): directly UPDATE-ing the pg_publication system catalog is
-- unsupported by PostgreSQL; prefer ALTER PUBLICATION ... SET — confirm
-- this direct catalog write is intentional.
update pg_publication set puballtables=true where pubname is not null;
-- 把所有表进行发布
-- Create a publication covering all tables in the database.
-- "dbz_publication" is the default publication name the Debezium
-- PostgreSQL connector looks for.
CREATE PUBLICATION dbz_publication FOR ALL TABLES;
-- 查询哪些表已经发布
-- Inspect which tables are currently included in each publication
-- (interactive verification query; SELECT * is fine here).
select * from pg_publication_tables;
4、更改表的复制标识包含更新和删除的值
-- 更改复制标识，使其包含更新和删除之前的值
-- Set REPLICA IDENTITY FULL so UPDATE/DELETE events in the logical
-- replication stream carry the complete old row image (required by
-- Debezium to emit "before" values). Replace xxxxxx with the real table name.
ALTER TABLE xxxxxx REPLICA IDENTITY FULL;
-- 查看复制标识（值为 f 表示 FULL，说明设置成功）
-- Verify the replica identity: relreplident = 'f' means FULL was applied.
-- Fix: the original used typographic smart quotes (‘xxxxxx’), which
-- PostgreSQL rejects as a syntax error; string literals require straight
-- single quotes. Replace xxxxxx with the real table name.
select relreplident from pg_class where relname = 'xxxxxx';
二、Flink读取PG数据
1、加载依赖
<!-- Flink CDC connector for PostgreSQL (Ververica), used to read the
     logical replication stream via an embedded Debezium engine. -->
<dependency>
<groupId>com.ververica</groupId>
<artifactId>flink-connector-postgres-cdc</artifactId>
<version>2.2.0</version>
</dependency>
<!-- PostgreSQL JDBC driver required by the CDC connector. -->
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>42.3.1</version>
</dependency>
<!-- Flink Kafka connector for writing the captured changes downstream.
     kafka-clients is excluded here to avoid a version clash with the
     client version pulled in transitively elsewhere (see note below
     in the surrounding text about manual conflict exclusion). -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka_${scala.version}</artifactId>
<version>${flink.version}</version>
<exclusions>
<exclusion>
<artifactId>kafka-clients</artifactId>
<groupId>org.apache.kafka</groupId>
</exclusion>
</exclusions>
</dependency>
注意:如果依赖中有flink-connector-kafka,可能会有冲突,需要手动排除冲突
2、使用Flink CDC创建pg的source
import com.ververica.cdc.connectors.postgres.PostgreSQLSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
public static SourceFunction getPGSource(String database, String schemaList,String tableList, String slotName) {
Properties properties = new Properties();
properties.setProperty("snapshot.mode", "always"); //always:全量+增量 never:增量
properties.setProperty("debezium.slot.name", "pg_cdc");
//在作业停止后自动清理 slot