1. pom.xml 文件需要引入下面的依赖包
<properties>
    <flink.version>1.15.4</flink.version>
    <hudi.version>0.13.1</hudi.version>
</properties>
<dependencies>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-streaming-java</artifactId>
        <version>${flink.version}</version>
        <scope>provided</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-table-common</artifactId>
        <version>${flink.version}</version>
        <scope>provided</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-table-api-java-bridge</artifactId>
        <version>${flink.version}</version>
        <scope>provided</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-table-planner_2.12</artifactId>
        <version>${flink.version}</version>
        <scope>provided</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-connector-files</artifactId>
        <version>${flink.version}</version>
        <scope>provided</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-clients</artifactId>
        <version>${flink.version}</version>
        <scope>provided</scope>
    </dependency>
    <!-- The Flink 1.15 bundle packages the Hudi Flink integration (including
         the classes from hudi-flink-client). -->
    <dependency>
        <groupId>org.apache.hudi</groupId>
        <artifactId>hudi-flink1.15-bundle</artifactId>
        <version>${hudi.version}</version>
        <scope>provided</scope>
    </dependency>
    <!-- FIX: was hard-coded to 0.14.1 while the bundle above uses 0.13.1.
         Mixing two Hudi versions on one classpath can cause NoSuchMethodError /
         class conflicts at runtime; keep every Hudi artifact on ${hudi.version}. -->
    <dependency>
        <groupId>org.apache.hudi</groupId>
        <artifactId>hudi-flink-client</artifactId>
        <version>${hudi.version}</version>
    </dependency>
</dependencies>
2. Java 代码如下
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.junit.Test;
import java.util.List;
public class HudiTest {

    /**
     * Reads up to 100 rows from a Hudi MERGE_ON_READ table through Flink SQL
     * and prints each row with "|"-separated fields.
     *
     * @throws Exception if the Flink job submission or collection fails
     */
    @Test
    public void test01() throws Exception {
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(streamEnv);
        // Register the Hudi table against its HDFS base path.
        // NOTE(review): port 8088 is usually the YARN ResourceManager web UI,
        // not the HDFS NameNode RPC port (typically 8020/9000) — verify the path.
        tableEnv.executeSql("CREATE TABLE IF NOT EXISTS table_name (\n" +
                " resume_id bigint,\n" +
                " update_by STRING,\n" +
                " gmt_modified Timestamp ,\n" +
                " del_flag int, \n" +
                " invite_operation_date STRING,\n" +
                " induct_date date ,\n" +
                " leave_date date ,\n" +
                " PRIMARY KEY (resume_id) NOT ENFORCED\n" +
                ") with (\n" +
                " 'connector' = 'hudi',\n" +
                " 'path' = 'hdfs://177.17.17.200:8088/user/hudi/resume_demo/hr_resume',\n" +
                " 'table.type' = 'MERGE_ON_READ'\n" +
                ")");
        Table table = tableEnv.sqlQuery("select * from table_name");
        DataStream<Row> dataStream = tableEnv.toDataStream(table);
        // FIX: removed the stray streamEnv.execute() that preceded this call.
        // executeAndCollect() submits its own job; calling execute() first fails
        // with "No operators defined in streaming topology" because no sink has
        // been attached to the environment at that point.
        List<Row> rows = dataStream.executeAndCollect(100);
        for (Row row : rows) {
            StringBuilder rowString = new StringBuilder();
            for (int i = 0; i < row.getArity(); i++) {
                rowString.append(row.getField(i)).append("|");
            }
            System.out.println(rowString);
        }
    }
}
3. 说明
经过测试,这里的 SQL 查询支持以下语法:
- 简单的 where 条件
- limit 10 offset 0
不支持以下语法:
- order by