写入 PostgreSQL 数据库
package jdbc.psql.csvtopsql;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
public class CSVSource {
    /**
     * Reads a text file, computes a running per-word count, and writes the result
     * to a CSV file, to stdout, and to a PostgreSQL sink.
     *
     * <p>Usage: {@code CSVSource [inputPath [csvOutputPath]]} — when no arguments
     * are given, the original hard-coded Windows paths are used as defaults.
     */
    public static void main(String[] args) throws Exception {
        // Generalized: paths may be supplied on the command line; defaults preserved.
        String inputPath = args.length > 0 ? args[0]
                : "C:\\Users\\Administrator\\Desktop\\资料\\flink\\test.txt";
        String csvOutputPath = args.length > 1 ? args[1]
                : "C:\\Users\\Administrator\\Desktop\\资料\\flink\\csv.txt";

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // A single parallel task keeps the CSV output in one file and in order.
        env.setParallelism(1);

        DataStreamSource<String> text = env.readTextFile(inputPath);

        DataStream<Tuple2<String, Integer>> counts = text
                .flatMap(new Tokenizer())
                .keyBy(0)   // key by the word (tuple field 0)
                .sum(1);    // running count per word (tuple field 1)

        counts.writeAsCsv(csvOutputPath);
        counts.print();
        counts.addSink(new PgsqlSink());

        env.execute("txt write to the psql demo");
    }

    /** Splits each input line into lower-cased words and emits a (word, 1) pair per word. */
    public static class Tokenizer implements FlatMapFunction<String, Tuple2<String, Integer>> {
        @Override
        public void flatMap(String value, Collector<Tuple2<String, Integer>> out) {
            // \W+ splits on any run of non-word characters; empty tokens are skipped
            // (e.g. when the line starts with punctuation).
            String[] tokens = value.toLowerCase().split("\\W+");
            for (String token : tokens) {
                if (!token.isEmpty()) {
                    out.collect(new Tuple2<>(token, 1));
                }
            }
        }
    }
}
package jdbc.psql.csvtopsql;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
/**
 * Sink that inserts each (word, count) tuple into the PostgreSQL table {@code flink}.
 * One JDBC connection and one prepared statement are created per parallel task.
 */
public class PgsqlSink extends RichSinkFunction<Tuple2<String, Integer>> {
    private static final long serialVersionUID = 1L;

    private Connection connection;
    private PreparedStatement preparedStatement;

    /** Opens the JDBC connection and prepares the INSERT statement once, at task start. */
    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        // JDBC connection settings.
        // NOTE(review): consider externalizing credentials instead of hard-coding them.
        String USERNAME = "postgres";
        String PASSWORD = "passwd";
        String driverClass = "org.postgresql.Driver";
        String URL = "jdbc:postgresql://192.168.108.01:5432/flink";
        // Explicit driver load; harmless with JDBC 4+ drivers, kept for older ones.
        Class.forName(driverClass);
        connection = DriverManager.getConnection(URL, USERNAME, PASSWORD);
        String sql = "insert into flink(word, num) values (?,?)";
        preparedStatement = connection.prepareStatement(sql);
    }

    /** Writes one tuple; failures now fail the job instead of being silently swallowed. */
    @Override
    public void invoke(Tuple2<String, Integer> value, Context context) {
        try {
            preparedStatement.setString(1, value.f0);
            preparedStatement.setInt(2, value.f1);
            preparedStatement.executeUpdate();
        } catch (Exception e) {
            // Previously printStackTrace() dropped the record without failing the job.
            // Rethrow with the cause preserved so Flink can fail/restart the task.
            throw new RuntimeException("Failed to write record to PostgreSQL: " + value, e);
        }
    }

    /** Releases JDBC resources in reverse acquisition order; connection closes even if the statement close fails. */
    @Override
    public void close() throws Exception {
        try {
            if (preparedStatement != null) {
                preparedStatement.close();
            }
        } finally {
            if (connection != null) {
                connection.close();
            }
            super.close();
        }
    }
}
读取 PostgreSQL 数据库
package jdbc.psql.psqlSource;
import jdbc.psql.Word;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
/**
 * Source that reads every row of {@code public.flink} once and emits it as a {@link Word}.
 * The stream finishes when the result set is exhausted or {@link #cancel()} is called.
 */
public class PsqlSource extends RichSourceFunction<Word> {
    private static final long serialVersionUID = 1L;

    private Connection connection;
    private PreparedStatement preparedStatement;
    // Cooperative cancellation flag, checked between rows in run().
    // Previously cancel() was a no-op, so a running query could not be stopped.
    private volatile boolean isRunning = true;

    /** Opens the JDBC connection and prepares the SELECT statement once, at task start. */
    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        String USERNAME = "postgres";
        String PASSWORD = "passwd";
        String driverClass = "org.postgresql.Driver";
        String URL = "jdbc:postgresql://192.168.108.01:5432/flink";
        Class.forName(driverClass);
        connection = DriverManager.getConnection(URL, USERNAME, PASSWORD);
        String sql = " SELECT * FROM public.flink ";
        preparedStatement = connection.prepareStatement(sql);
    }

    /** Emits one Word per table row. Exceptions now propagate (run declares throws Exception) instead of being swallowed into an empty stream. */
    @Override
    public void run(SourceContext<Word> sourceContext) throws Exception {
        // try-with-resources guarantees the ResultSet is closed (it was leaked before).
        try (ResultSet resultSet = preparedStatement.executeQuery()) {
            while (isRunning && resultSet.next()) {
                Word word = new Word();
                word.setId(resultSet.getInt("id"));
                word.setWord(resultSet.getString("word"));
                word.setNum(resultSet.getInt("num"));
                sourceContext.collect(word);
            }
        }
    }

    /** Requests the emit loop in run() to stop at the next row boundary. */
    @Override
    public void cancel() {
        isRunning = false;
    }

    /** Releases JDBC resources in reverse acquisition order (statement first, then connection — the original closed the connection first). */
    @Override
    public void close() throws Exception {
        try {
            if (preparedStatement != null) {
                preparedStatement.close();
            }
        } finally {
            if (connection != null) {
                connection.close();
            }
            super.close();
        }
    }
}
package jdbc.psql.psqlSource;
import jdbc.psql.Word;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
/** Demo driver: streams rows from the PostgreSQL source and prints each {@link Word}. */
public class WordSourceFromPsql {
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment environment =
                StreamExecutionEnvironment.getExecutionEnvironment();
        // Suppress Flink's own stdout logging so only the records are printed.
        environment.getConfig().disableSysoutLogging();

        DataStream<Word> words = environment.addSource(new PsqlSource());
        words.print();

        environment.execute("PostGreSQL Source to Flink demo");
    }
}
package jdbc.psql;
/**
 * Value object for one row of the {@code flink} table: an id, a word, and its count.
 * Now overrides {@code equals}/{@code hashCode} together with the existing
 * {@code toString} so instances compare by value.
 */
public class Word {
    private int id;
    private String word;
    private int num;

    /** All-args constructor. */
    public Word(int id, String word, int num) {
        this.id = id;
        this.word = word;
        this.num = num;
    }

    /** No-args constructor (needed for reflective instantiation, e.g. by Flink's POJO serializer — TODO confirm). */
    public Word() {
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getWord() {
        return word;
    }

    public void setWord(String word) {
        this.word = word;
    }

    public int getNum() {
        return num;
    }

    public void setNum(int num) {
        this.num = num;
    }

    /** Value equality over all three fields; null-safe on {@code word}. */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof Word)) {
            return false;
        }
        Word other = (Word) o;
        return id == other.id
                && num == other.num
                && (word == null ? other.word == null : word.equals(other.word));
    }

    /** Consistent with equals: combines all three fields. */
    @Override
    public int hashCode() {
        int result = id;
        result = 31 * result + (word == null ? 0 : word.hashCode());
        result = 31 * result + num;
        return result;
    }

    @Override
    public String toString() {
        return "Word{" +
                "id=" + id +
                ", word='" + word + '\'' +
                ", num=" + num +
                '}';
    }
}