前面写了个Flink操作Mysql的示例,Scala版本的,不过是对照源码的翻译。这两天使用Java开发(boss说不要用scala),这里贴一下使用方法。超简单的,大家注意看下代码。
mysql相关的依赖(kafka的自行添加)
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table_2.11</artifactId>
<version>1.7.2</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-jdbc_2.11</artifactId>
<version>1.7.2</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.47</version>
</dependency>
maven上目前还下载不到 flink-jdbc_2.11,这个可以自己下载源码打一个,放到自己的仓库中。懒得打的可以使用我这个。注意要放到正确的路径下。
链接:https://pan.baidu.com/s/1NQabicU62k0JMgP8TcH0xQ 提取码:dg02
package com.maxiu.test;
import com.alibaba.fastjson.JSON;
import com.maxiu.test.constant.JDBCConstant;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.io.jdbc.JDBCAppendTableSink;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.types.Row;
import java.util.Properties;
/**
 * Streams JSON account records from a Kafka topic and upserts them into a
 * MySQL table ({@code bank_account}) via Flink's {@code JDBCAppendTableSink}.
 *
 * @author xianghu.wang
 * @time 2019/4/1
 * @description
 */
public class BankAccount {
    // Parameter types of the upsert statement, in order, matching the
    // bank_account columns: name, age, account, update_time.
    // Wildcard-parameterized to avoid raw types; the sink builder accepts
    // TypeInformation<?>... so callers are unaffected.
    private static final TypeInformation<?>[] FIELD_TYPES = new TypeInformation<?>[]{
            BasicTypeInfo.STRING_TYPE_INFO,
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.DOUBLE_TYPE_INFO,
            BasicTypeInfo.LONG_TYPE_INFO
    };

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint every 5s; the Kafka source commits offsets when a
        // checkpoint completes.
        env.enableCheckpointing(5000);
        // NOTE(review): EventTime is declared but no timestamp/watermark
        // assigner is installed and no event-time operator is used below,
        // so this setting currently has no effect — confirm intent.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Kafka consumer configuration.
        Properties consumerProps = new Properties();
        consumerProps.setProperty("bootstrap.servers", "localhost:9092");
        consumerProps.setProperty("group.id", "test");
        // With checkpointing enabled, Flink commits offsets on checkpoints
        // and ignores Kafka's own auto-commit, so this flag is a no-op here.
        consumerProps.setProperty("enable.auto.commit", "true");
        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>("topicA", new SimpleStringSchema(), consumerProps);

        // Ingest the raw JSON strings from Kafka.
        DataStream<String> kafkaData = env.addSource(consumer);

        // Parse each JSON message into a Row of (name, age, account,
        // update_time); update_time is the processing time of this map call.
        DataStream<Row> accountLatest = kafkaData
                .map(JSON::parseObject)
                .map(x -> Row.of(x.getString("name"),
                        x.getInteger("age"),
                        x.getDouble("account"),
                        System.currentTimeMillis()));

        // Parameterized upsert: insert, or refresh the non-key columns when
        // the primary/unique key already exists.
        String upsertSql = "INSERT INTO bank_account (name,age,account,update_time) values (?,?,?,?) ON DUPLICATE KEY UPDATE " +
                "age=VALUES(age),account=VALUES(account),update_time=VALUES(update_time)";

        // The sink executes whatever SQL is configured, so any DML works —
        // behavior is driven entirely by the statement above.
        JDBCAppendTableSink sink = JDBCAppendTableSink.builder()
                .setDrivername("com.mysql.jdbc.Driver")
                .setDBUrl("jdbc:mysql://localhost:3306/testdb?characterEncoding=utf8&useSSL=true")
                .setUsername("root")
                .setPassword("123456")
                .setQuery(upsertSql)
                .setParameterTypes(FIELD_TYPES)
                .build();
        sink.emitDataStream(accountLatest);

        // Launch the streaming job.
        env.execute("bank account");
    }
}