Connecting Flink to a PostgreSQL database

Writing to the PostgreSQL database

package jdbc.psql.csvtopsql;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class CSVSource {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Read the local text file as a stream of lines.
        DataStreamSource<String> text = env.readTextFile("C:\\Users\\Administrator\\Desktop\\资料\\flink\\test.txt");

        // Word count: split each line into words, key by the word, sum the counts.
        DataStream<Tuple2<String, Integer>> counts = text.flatMap(new Tokenizer())
                .keyBy(0)
                .sum(1);

        counts.writeAsCsv("C:\\Users\\Administrator\\Desktop\\资料\\flink\\csv.txt");
        counts.print();

        // Write the word counts into PostgreSQL through the custom sink below.
        counts.addSink(new PgsqlSink());

        env.execute("txt write to the psql demo");
    }

    public static class Tokenizer implements FlatMapFunction<String, Tuple2<String, Integer>> {

        @Override
        public void flatMap(String value, Collector<Tuple2<String, Integer>> out) {
            String[] tokens = value.toLowerCase().split("\\W+");
            for (String token : tokens) {
                if (token.length() > 0) {
                    out.collect(new Tuple2<>(token, 1));
                }
            }
        }
    }
}
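As a side note before looking at the hand-written sink below: Flink's flink-connector-jdbc module (Flink 1.11+) ships a ready-made JdbcSink that covers the same write path, including statement batching and connection handling. This is a minimal sketch, not part of the original post; the class name JdbcWordCountSink and the factory method are mine, and it assumes flink-connector-jdbc plus the PostgreSQL driver are on the classpath.

package jdbc.psql.csvtopsql;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;

// Hypothetical helper: builds a JDBC sink equivalent to the hand-written PgsqlSink.
public class JdbcWordCountSink {

    public static SinkFunction<Tuple2<String, Integer>> create() {
        return JdbcSink.sink(
                "insert into flink(word, num) values (?, ?)",
                // The cast gives the lambda an explicit, serializable target type.
                (JdbcStatementBuilder<Tuple2<String, Integer>>) (statement, tuple) -> {
                    statement.setString(1, tuple.f0);
                    statement.setInt(2, tuple.f1);
                },
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withUrl("jdbc:postgresql://192.168.108.01:5432/flink")
                        .withDriverName("org.postgresql.Driver")
                        .withUsername("postgres")
                        .withPassword("passwd")
                        .build());
    }
}

With this helper, CSVSource could call counts.addSink(JdbcWordCountSink.create()) instead of counts.addSink(new PgsqlSink()).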

package jdbc.psql.csvtopsql;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

public class PgsqlSink extends RichSinkFunction<Tuple2<String, Integer>> {

    private static final long serialVersionUID = 1L;

    private Connection connection;
    private PreparedStatement preparedStatement;

    @Override
    public void open(Configuration parameters) throws Exception {
        // JDBC connection settings
        String USERNAME = "postgres";
        String PASSWORD = "passwd";
        String driverClass = "org.postgresql.Driver";
        String URL = "jdbc:postgresql://192.168.108.01:5432/flink";

        // Load the JDBC driver
        Class.forName(driverClass);
        // Open the database connection
        connection = DriverManager.getConnection(URL, USERNAME, PASSWORD);
        String sql = "insert into flink(word, num) values (?,?)";
        preparedStatement = connection.prepareStatement(sql);
        super.open(parameters);
    }

    @Override
    public void invoke(Tuple2<String, Integer> value, Context context) {
        try {
            String word = value.f0;
            Integer num = value.f1;
            preparedStatement.setString(1, word);
            preparedStatement.setInt(2, num);
            preparedStatement.executeUpdate();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    public void close() throws Exception {
        if (preparedStatement != null) {
            preparedStatement.close();
        }
        if (connection != null) {
            connection.close();
        }
        super.close();
    }
}
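The insert above, and the read example that follows, assume a table public.flink with id, word, and num columns. The original post does not show the DDL; this is a minimal sketch of creating the table via plain JDBC, with column types that are assumptions inferred from how the code uses them.

package jdbc.psql;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

// Hypothetical one-off helper: creates the table the examples read and write.
public class CreateFlinkTable {

    public static void main(String[] args) throws Exception {
        Class.forName("org.postgresql.Driver");
        try (Connection connection = DriverManager.getConnection(
                "jdbc:postgresql://192.168.108.01:5432/flink", "postgres", "passwd");
             Statement statement = connection.createStatement()) {
            // Column types are assumptions: id is only read, word/num are written as String/int.
            statement.executeUpdate(
                    "CREATE TABLE IF NOT EXISTS public.flink ("
                            + "id SERIAL PRIMARY KEY, "
                            + "word VARCHAR(255), "
                            + "num INT)");
        }
    }
}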

Reading from the PostgreSQL database

package jdbc.psql.psqlSource;

import jdbc.psql.Word;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class PsqlSource extends RichSourceFunction<Word> {

    private static final long serialVersionUID = 1L;

    private Connection connection;
    private PreparedStatement preparedStatement;

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        String USERNAME = "postgres";
        String PASSWORD = "passwd";
        String driverClass = "org.postgresql.Driver";
        String URL = "jdbc:postgresql://192.168.108.01:5432/flink";
        Class.forName(driverClass);
        connection = DriverManager.getConnection(URL, USERNAME, PASSWORD);
        String sql = " SELECT * FROM public.flink ";
        preparedStatement = connection.prepareStatement(sql);
    }

    @Override
    public void run(SourceContext<Word> sourceContext) throws Exception {
        try {
            ResultSet resultSet = preparedStatement.executeQuery();
            // Map each row to a Word POJO and emit it downstream.
            while (resultSet.next()) {
                Word word = new Word();
                word.setId(resultSet.getInt("id"));
                word.setWord(resultSet.getString("word"));
                word.setNum(resultSet.getInt("num"));
                sourceContext.collect(word);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    public void cancel() {
    }

    @Override
    public void close() throws Exception {
        super.close();
        if (preparedStatement != null) {
            preparedStatement.close();
        }
        if (connection != null) {
            connection.close();
        }
    }
}
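The cancel() above is a no-op, which works for a one-shot query but gives Flink nothing to interrupt if the job is cancelled while the result set is still being read. A common refinement is a volatile running flag checked inside run(); this is a minimal sketch only (the class name CancellablePsqlSource is mine, connection settings copied from above).

package jdbc.psql.psqlSource;

import jdbc.psql.Word;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class CancellablePsqlSource extends RichSourceFunction<Word> {

    private static final long serialVersionUID = 1L;

    private Connection connection;
    private PreparedStatement preparedStatement;
    // Checked in run() so that cancel() can stop emission promptly.
    private volatile boolean running = true;

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        Class.forName("org.postgresql.Driver");
        connection = DriverManager.getConnection(
                "jdbc:postgresql://192.168.108.01:5432/flink", "postgres", "passwd");
        preparedStatement = connection.prepareStatement("SELECT * FROM public.flink");
    }

    @Override
    public void run(SourceContext<Word> sourceContext) throws Exception {
        ResultSet resultSet = preparedStatement.executeQuery();
        // Stop emitting as soon as the job is cancelled.
        while (running && resultSet.next()) {
            sourceContext.collect(new Word(
                    resultSet.getInt("id"),
                    resultSet.getString("word"),
                    resultSet.getInt("num")));
        }
    }

    @Override
    public void cancel() {
        running = false;
    }

    @Override
    public void close() throws Exception {
        super.close();
        if (preparedStatement != null) {
            preparedStatement.close();
        }
        if (connection != null) {
            connection.close();
        }
    }
}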

package jdbc.psql.psqlSource;

import jdbc.psql.Word;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class WordSourceFromPsql {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.getConfig().disableSysoutLogging();

        // Read rows from PostgreSQL through the custom source and print them.
        DataStream<Word> stream = env.addSource(new PsqlSource());
        stream.print();

        env.execute("PostGreSQL Source to Flink demo");
    }
}

package jdbc.psql;

public class Word {

    private int id;
    private String word;
    private int num;

    public Word(int id, String word, int num) {
        this.id = id;
        this.word = word;
        this.num = num;
    }

    public Word() {
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getWord() {
        return word;
    }

    public void setWord(String word) {
        this.word = word;
    }

    public int getNum() {
        return num;
    }

    public void setNum(int num) {
        this.num = num;
    }

    @Override
    public String toString() {
        return "Word{" +
                "id=" + id +
                ", word='" + word + '\'' +
                ", num=" + num +
                '}';
    }
}
