先上代码:
import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.EnvironmentSettings; import org.apache.flink.table.api.Table; import org.apache.flink.table.api.TableResult; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; import org.apache.flink.types.Row; /** * @program: flink-tech * @description: 1.11情况下的sql * @author: Mr.Wang * @create: 2020-07-13 09:41 **/ public class TableDemo { /** * 官网API网址:https://ci.apache.org/projects/flink/flink-docs-release-1.11/zh/dev/table/connectors/kafka.html#how-to-create-a-kafka-table * *所有参数: * connector * topic * properties.bootstrap.servers * properties.group.id * format * scan.startup.mode * scan.startup.specific-offsets * scan.startup.timestamp-millis * sink.partitioner * */ private static final String KAFKA_SQL = "CREATE TABLE kafkaTable (\n" + " code STRING," + " total_emp INT" + ") WITH (" + " 'connector' = 'kafka'," + " 'topic' = 'flink_dwd_test1'," + " 'properties.bootstrap.servers' = 'local:9092'," + " 'properties.group.id' = 'test1'," + " 'format' = 'json'," + " 'scan.startup.mode' = 'earliest-offset'" + ")"; public static void main(String[] args) throws Exception { //bink table StreamExecutionEnvironment bsEnv = StreamExecutionEnvironment.getExecutionEnvironment(); EnvironmentSettings bsSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build(); StreamTableEnvironment bsTableEnv = StreamTableEnvironment.create(bsEnv, bsSettings); TableResult tableResult = bsTableEnv.executeSql(KAFKA_SQL); Table table = bsTableEnv.sqlQuery("select * from kafkaTable"); DataStream<Row> dsRow = bsTableEnv.toAppendStream(table, Row.class); dsRow.print(); // tableResult.print(); // bsTableEnv.execute("aaa"); bsEnv.execute("aa"); } }
pom依赖:
这里我要多说一点,我开始的时候按照官网的依赖导入执行一直报错,在官方群里问过很多次,后来还是通过白斩鸡确认之后,基础代码没问题,是依赖问题。