1. Stream (fact) table — integers read from a socket form the streaming side of the join
// Build the fact stream: every text line received on localhost:9999 is parsed
// to an Integer id that will be joined against the MySQL dimension table.
// NOTE(review): this statement uses streamExecutionEnvironment, which the
// original listing declares further down — create the environment first when
// assembling the full program.
DataStream<Integer> dataStream = streamExecutionEnvironment
        .socketTextStream("localhost", 9999)
        // Method reference instead of the verbose block lambda; a malformed
        // line raises NumberFormatException and fails the task — pre-filter
        // the input if dirty data is expected.
        .map(Integer::parseInt);
2. MySQL dimension (lookup) table
-- Dimension table backing the lookup side of the stream join.
-- IF NOT EXISTS makes the setup script idempotent (safe to re-run).
CREATE TABLE IF NOT EXISTS `source5` (
  `id`        INT(11)     NOT NULL,     -- join key matched against the stream's id
  `user_name` VARCHAR(19) DEFAULT NULL, -- enrichment attribute
  `age`       INT(11)     DEFAULT NULL, -- enrichment attribute
  PRIMARY KEY (`id`)
);
TypeInformation>[] fieldTypes = new TypeInformation>[]{
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO,
BasicTypeInfo.INT_TYPE_INFO
};
RowTypeInfo rowTypeInfo = new RowTypeInfo(fieldTypes);
JDBCInputFormat jdbcInputFormat = JDBCInputFormat.buildJDBCInputFormat()
.setDrivername("com.mysql.jdbc.Driver")
.setDBUrl("jdbc:mysql://localhost/test1")
.setUsername("root")
.setPassword("123456")
.setQuery("select * from source5")
.setRowTypeInfo(rowTypeInfo)
.finish();
StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
StreamTableEnvironment stableEnv = new StreamTableEnvironment(streamExecutionEnvironment, TableConfig.DEFAULT());
DataStreamSource dataSource1 = streamExecutionEnvironment.createInput(jdbcInputFormat);
3. Join the stream with the MySQL dimension table
// Register both sides with the table environment; the field lists name the
// columns each stream exposes to SQL / Table API.
stableEnv.registerDataStream("mysql", dataSource1, "id1,name,age");
stableEnv.registerDataStream("stream1", dataStream, "id2");
// (Removed a commented-out UPDATE statement left in the original: it had an
// unterminated string literal, and Flink's sqlQuery() only accepts SELECTs.)
Table mysql = stableEnv.sqlQuery("select * from mysql");
Table stream1 = stableEnv.scan("stream1");
// Inner join: a socket record is emitted only when a matching dimension row
// (id1 = id2) exists in the MySQL snapshot.
Table joined = stream1.join(mysql, "id1=id2");
DataStream<Row> res = stableEnv.toAppendStream(joined, Row.class);
// Log each joined row; typed SinkFunction<Row> replaces the raw type.
res.addSink(new SinkFunction<Row>() {
    @Override
    public void invoke(Row value) throws Exception {
        logger.info(value.toString());
    }
});
// Job name fixed: "Window WordCount" was a copy-paste leftover from another example.
streamExecutionEnvironment.execute("Stream-MySQL dimension join");
4. Run `nc -lk 9999` in a terminal, then type integer ids; each id entered on the stream is joined against the MySQL data.
5. Maven dependencies
<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <flink.version>1.7.0</flink.version>
    <java.version>1.8</java.version>
    <scala.binary.version>2.11</scala.binary.version>
    <maven.compiler.source>${java.version}</maven.compiler.source>
    <maven.compiler.target>${java.version}</maven.compiler.target>
</properties>

<dependencies>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-java</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-scala_${scala.binary.version}</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-jdbc_${scala.binary.version}</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
        <version>${flink.version}</version>
        <scope>provided</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-table_${scala.binary.version}</artifactId>
        <version>1.7.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-connector-kafka-0.10_${scala.binary.version}</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <dependency>
        <groupId>mysql</groupId>
        <artifactId>mysql-connector-java</artifactId>
        <version>5.1.46</version>
    </dependency>
    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-log4j12</artifactId>
        <version>1.7.7</version>
        <scope>runtime</scope>
    </dependency>
    <dependency>
        <groupId>log4j</groupId>
        <artifactId>log4j</artifactId>
        <version>1.2.17</version>
        <scope>runtime</scope>
    </dependency>
</dependencies>