<?xml version="1.0" encoding="UTF-8"?><project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"><modelVersion>4.0.0</modelVersion><groupId>org.apache.flink</groupId><artifactId>flink-quickstart-java</artifactId><version>1.0-SNAPSHOT</version><properties><maven.compiler.source>11</maven.compiler.source><maven.compiler.target>11</maven.compiler.target><flink.version>1.17.1</flink.version><hadoop.version>3.3.6</hadoop.version></properties><dependencies><dependency><groupId>org.apache.paimon</groupId><artifactId>paimon-flink-1.17</artifactId><version>0.5.0-incubating</version></dependency><dependency><groupId>org.apache.flink</groupId><artifactId>flink-table-api-java-bridge</artifactId><version>1.17.0</version></dependency><!-- https://mvnrepository.com/artifact/org.apache.flink/flink-table-planner --><dependency><groupId>org.apache.flink</groupId><artifactId>flink-table-planner_2.12</artifactId><version>${flink.version}</version></dependency><!-- https://mvnrepository.com/artifact/org.apache.flink/flink-table-runtime --><dependency><groupId>org.apache.flink</groupId><artifactId>flink-table-runtime</artifactId><version>${flink.version}</version></dependency><!-- https://mvnrepository.com/artifact/org.apache.flink/flink-sql-client --><dependency><groupId>org.apache.flink</groupId><artifactId>flink-sql-client</artifactId><version>${flink.version}</version></dependency><!-- hadoop begin --><!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client --><dependency><groupId>org.apache.hadoop</groupId><artifactId>hadoop-client</artifactId><version>${hadoop.version}</version></dependency><dependency><groupId>org.apache.hadoop</groupId><artifactId>hadoop-common</artifactId><version>${hadoop.version}</version><exclusions><exclusion><groupId>org.apache.avro</groupId><artifactId>avro</artifactId></exclusion><exclusion><groupId>log4j</groupId><artifactId>log4j</artifactId></exclusion><exclusion><groupId>org.slf4j</groupId><artifactId>slf4j-log4j12</artifactId></exclusion><exclusion><groupId>jdk.tools</groupId><artifactId>jdk.tools</artifactId></exclusion></exclusions></dependency><!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs 
--><dependency><groupId>org.apache.hadoop</groupId><artifactId>hadoop-hdfs</artifactId><version>${hadoop.version}</version><exclusions><exclusion><groupId>org.apache.avro</groupId><artifactId>avro</artifactId></exclusion><exclusion><groupId>log4j</groupId><artifactId>log4j</artifactId></exclusion><exclusion><groupId>org.slf4j</groupId><artifactId>slf4j-log4j12</artifactId></exclusion></exclusions></dependency><dependency><groupId>org.apache.hadoop</groupId><artifactId>hadoop-hdfs-client</artifactId><version>${hadoop.version}</version><exclusions><exclusion><groupId>org.apache.avro</groupId><artifactId>avro</artifactId></exclusion><exclusion><groupId>log4j</groupId><artifactId>log4j</artifactId></exclusion><exclusion><groupId>org.slf4j</groupId><artifactId>slf4j-log4j12</artifactId></exclusion></exclusions></dependency><dependency><groupId>org.apache.hadoop</groupId><artifactId>hadoop-mapreduce-client-core</artifactId><version>${hadoop.version}</version><exclusions><exclusion><groupId>com.google.protobuf</groupId><artifactId>protobuf-java</artifactId></exclusion><exclusion><groupId>ch.qos.reload4j</groupId><artifactId>reload4j</artifactId></exclusion><exclusion><groupId>org.slf4j</groupId><artifactId>slf4j-reload4j</artifactId></exclusion><exclusion><groupId>log4j</groupId><artifactId>log4j</artifactId></exclusion><exclusion><groupId>org.slf4j</groupId><artifactId>slf4j-log4j12</artifactId></exclusion><exclusion><groupId>jdk.tools</groupId><artifactId>jdk.tools</artifactId></exclusion></exclusions></dependency><dependency><groupId>org.apache.hadoop</groupId><artifactId>hadoop-client-api</artifactId><version>${hadoop.version}</version><exclusions><exclusion><groupId>com.google.protobuf</groupId><artifactId>protobuf-java</artifactId></exclusion><exclusion><groupId>ch.qos.reload4j</groupId><artifactId>reload4j</artifactId></exclusion><exclusion><groupId>org.slf4j</groupId><artifactId>slf4j-reload4j</artifactId></exclusion><exclusion><groupId>log4j</groupId><artifactId>log4j</artifactId></exclusion><exclusion><groupId>org.slf4j</groupId><artifactId>slf4j-log4j12</artifactId></exclusion><exclusion><groupId>jdk.tools</groupId><artifactId>jdk.tools</artifactId></exclusion></exclusions></dependency></dependencies><build><plugins><plugin><groupId>org.apache.maven.plugins</groupId><artifactId>maven-shade-plugin</artifactId><version>3.1.1</version><executions><execution><phase>package</phase><goals><goal>shade</goal></goals><configuration><artifactSet><excludes><exclude>com.google.code.findbugs:jsr305</exclude></excludes></artifactSet><filters><filter><!--Do not copy the signatures in the META-INFfolder.
Otherwise,this might cause SecurityExceptions when using the JAR.--><artifact>*:*</artifact><excludes><exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<!-- Replace this with the main class of your job -->
<mainClass>my.programs.main.clazz</mainClass>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
</transformers>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
Code
package paimon;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;

public class WriteToTable {

    public static void main(String[] args) throws Exception {
        // create environments of both APIs
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // create a changelog DataStream
        DataStream<Row> dataStream =
                env.fromElements(
                                Row.ofKind(RowKind.INSERT, "Alice", 12),
                                Row.ofKind(RowKind.INSERT, "Bob", 5),
                                Row.ofKind(RowKind.UPDATE_BEFORE, "Alice", 12),
                                Row.ofKind(RowKind.UPDATE_AFTER, "Alice", 100))
                        .returns(Types.ROW_NAMED(
                                new String[] {"name", "age"},
                                Types.STRING, Types.INT));

        // interpret the DataStream as a Table
        Schema schema = Schema.newBuilder()
                .column("name", DataTypes.STRING())
                .column("age", DataTypes.INT())
                .build();
        Table table = tableEnv.fromChangelogStream(dataStream, schema);

        // create the Paimon catalog (local filesystem warehouse; use the HDFS variant if needed)
        tableEnv.executeSql("CREATE CATALOG paimon WITH ('type' = 'paimon', 'warehouse' = 'file:///Users/xxxxx/share_dir/paimon')");
        // tableEnv.executeSql("CREATE CATALOG paimon WITH ('type' = 'paimon', 'warehouse' = 'hdfs://xxxxxx:8020/paimon')");
        tableEnv.executeSql("USE CATALOG paimon");

        // register the changelog table under a name so it can be referenced in SQL
        tableEnv.createTemporaryView("InputTable", table);
        tableEnv.executeSql("CREATE TABLE IF NOT EXISTS sink_paimon_table (\n"
                + "  name STRING PRIMARY KEY NOT ENFORCED,\n"
                + "  age INT\n"
                + ")");

        // insert into the Paimon table from the DataStream-backed table
        tableEnv.executeSql("INSERT INTO sink_paimon_table SELECT * FROM InputTable");
        // env.execute("paimon demo1");
    }
}