package com.bai

import java.sql
import java.sql.{DriverManager, PreparedStatement, ResultSet, ResultSetMetaData}

import com.alibaba.fastjson.{JSON, JSONObject}

import org.apache.flink.api.common.functions.RichFlatMapFunction
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.scala._
import org.apache.flink.util.Collector
/**
 * Demo job: reads "&lt;id&gt;,&lt;name&gt;" lines from a socket, inserts each record into
 * MySQL table `input`, then reads the whole table back and emits every row as a
 * [[Student]].
 *
 * NOTE(review): re-querying the entire table on every incoming element is the
 * original design (kept for behavioral compatibility); it is O(rows) per record.
 */
object JDBCtest {

  // NOTE: retained for source compatibility only — the JDBC resources now live
  // inside the RichFlatMapFunction (driver-side object vars are not usable from
  // a serialized operator instance on a real cluster).
  var conn: sql.Connection = _
  var selectStmt: PreparedStatement = _
  var insertStmt: PreparedStatement = _

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI()

    // Raw socket text; each well-formed record is "<digits>,<anything>".
    val ncStream = env.socketTextStream("hadoop104", 7777)
    ncStream.print("before")

    // Drop malformed lines so arr(0).toInt / arr(1) below cannot fail.
    val value1: DataStream[String] = ncStream.filter(_.matches("(\\d)+,(.)*"))
    value1.print("after")

    val value2: DataStream[Student] = value1.flatMap(new RichFlatMapFunction[String, Student] {
      // Per-subtask JDBC resources. The original opened a fresh connection on
      // every element and closed it after the first one (second record would
      // hit a closed connection); open()/close() is the correct lifecycle.
      @transient private var jdbcConn: sql.Connection = _
      @transient private var insert: PreparedStatement = _
      @transient private var select: PreparedStatement = _

      override def open(parameters: Configuration): Unit = {
        jdbcConn = DriverManager.getConnection("jdbc:mysql://hadoop103:3306/test", "root", "123456")
        insert = jdbcConn.prepareStatement("insert into input (id, name) values (?,?)")
        select = jdbcConn.prepareStatement("select * from input")
      }

      override def flatMap(value: String, out: Collector[Student]): Unit = {
        // limit = 2 keeps any commas inside the name in arr(1).
        val arr: Array[String] = value.split(",", 2)
        println(arr.toList)

        // 1) Insert the incoming record.
        insert.setInt(1, arr(0).toInt)
        insert.setString(2, arr(1))
        insert.execute()

        // 2) Read the whole table back; emit each row as a Student via JSON.
        val meta: ResultSetMetaData = select.getMetaData
        val resultSet: ResultSet = select.executeQuery()
        try {
          while (resultSet.next()) {
            val json = new JSONObject()
            for (i <- 1 to meta.getColumnCount) {
              json.put(meta.getColumnName(i), resultSet.getString(i))
            }
            out.collect(JSON.parseObject(json.toString, classOf[Student]))
          }
        } finally {
          resultSet.close() // was leaked in the original
        }
      }

      override def close(): Unit = {
        // Release resources exactly once, when the subtask shuts down.
        if (insert != null) insert.close()
        if (select != null) select.close()
        if (jdbcConn != null) jdbcConn.close()
      }
    })

    value2.print("dd")
    env.execute()
  }
}

case class Student(id: Int, name: String)
The Maven dependencies are as follows:
<properties><project.build.sourceEncoding>UTF-8</project.build.sourceEncoding><flink.version>1.11.0</flink.version><scala.binary.version>2.11</scala.binary.version><scala.version>2.11.12</scala.version><log4j.version>2.12.1</log4j.version><hadoop.version>2.7.3</hadoop.version></properties><dependencies><dependency><groupId>mysql</groupId><artifactId>mysql-connector-java</artifactId><version>5.1.44</version></dependency><!-- Apache Flink dependencies --><!-- These dependencies are provided, because they should not be packaged into the JAR file.--><dependency><groupId>org.apache.flink</groupId><artifactId>flink-scala_${scala.binary.version}</artifactId><version>${flink.version}</version><!--<scope>provided</scope>--></dependency><dependency><groupId>org.apache.flink</groupId><artifactId>flink-streaming-scala_${scala.binary.version}</artifactId><version>${flink.version}</version><!--<scope>provided</scope>--></dependency><dependency><groupId>org.apache.flink</groupId><artifactId>flink-clients_${scala.binary.version}</artifactId><version>${flink.version}</version><!--<scope>provided</scope>--></dependency><dependency><groupId>org.apache.flink</groupId><artifactId>flink-table-api-scala-bridge_${scala.binary.version}</artifactId><version>${flink.version}</version><!--<scope>provided</scope>--></dependency><dependency><groupId>org.apache.flink</groupId><artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId><version>${flink.version}</version><!--<scope>provided</scope>--></dependency><dependency><groupId>org.apache.flink</groupId><artifactId>flink-table-common</artifactId><version>${flink.version}</version><!--<scope>provided</scope>--></dependency><dependency><groupId>org.apache.flink</groupId><artifactId>flink-runtime-web_2.11</artifactId><!--<scope>provided</scope>--><version>${flink.version}</version></dependency><!-- Scala Library, provided by Flink as 
well.--><dependency><groupId>org.scala-lang</groupId><artifactId>scala-library</artifactId><version>${scala.version}</version><!--<scope>provided</scope>--></dependency><dependency><groupId>org.apache.flink</groupId><artifactId>flink-statebackend-rocksdb_${scala.binary.version}</artifactId><version>${flink.version}</version></dependency><!-- Add connector dependencies here. They must be in the default scope (compile).--><!-- Example:--><dependency><groupId>org.apache.flink</groupId><artifactId>flink-connector-kafka_${scala.binary.version}</artifactId><version>${flink.version}</version></dependency><dependency><groupId>org.apache.hbase</groupId><artifactId>hbase-server</artifactId><scope>provided</scope><version>2.2.4</version></dependency><dependency><groupId>com.google.guava</groupId><artifactId>guava</artifactId><version>29.0-jre</version></dependency><dependency><groupId>com.alibaba</groupId><artifactId>fastjson</artifactId><version>1.2.68</version></dependency><!-- Add logging framework, to produce console output when running in the IDE.--><!-- These dependencies are excluded from the application JAR by default.--><dependency><groupId>org.apache.logging.log4j</groupId><artifactId>log4j-slf4j-impl</artifactId><version>${log4j.version}</version><scope>runtime</scope></dependency><dependency><groupId>org.apache.logging.log4j</groupId><artifactId>log4j-api</artifactId><version>${log4j.version}</version><scope>runtime</scope></dependency><dependency><groupId>org.apache.logging.log4j</groupId><artifactId>log4j-core</artifactId><version>${log4j.version}</version><scope>runtime</scope></dependency><!-- 
hadoop--><dependency><groupId>org.apache.hadoop</groupId><artifactId>hadoop-common</artifactId><version>${hadoop.version}</version><scope>provided</scope><exclusions><exclusion><groupId>org.slf4j</groupId><artifactId>slf4j-log4j12</artifactId></exclusion><exclusion><groupId>log4j</groupId><artifactId>log4j</artifactId></exclusion><exclusion><groupId>org.slf4j</groupId><artifactId>slf4j-api</artifactId></exclusion><exclusion><artifactId>commons-logging</artifactId><groupId>commons-logging</groupId></exclusion></exclusions></dependency><dependency><groupId>org.apache.hadoop</groupId><artifactId>hadoop-hdfs</artifactId><version>${hadoop.version}</version><scope>provided</scope><exclusions><exclusion><artifactId>log4j</artifactId><groupId>log4j</groupId></exclusion></exclusions></dependency><dependency><groupId>org.apache.hadoop</groupId><artifactId>hadoop-client</artifactId><version>${hadoop.version}</version><scope>provided</scope></dependency><dependency><groupId>jdk.tools</groupId><artifactId>jdk.tools</artifactId><version>1.8</version><scope>system</scope><systemPath>${JAVA_HOME}/lib/tools.jar</systemPath></dependency></dependencies><build><plugins><plugin ><groupId>org.apache.maven.plugins</groupId><artifactId>maven-compiler-plugin</artifactId><version>3.6.1</version><!-- 所有的编译都依照JDK1.8来搞 
--><configuration><source>1.8</source><target>1.8</target></configuration></plugin><plugin><groupId>org.scala-tools</groupId><artifactId>maven-scala-plugin</artifactId><version>2.15.1</version><executions><execution><id>compile-scala</id><goals><goal>add-source</goal><goal>compile</goal></goals></execution><execution><id>test-compile-scala</id><goals><goal>add-source</goal><goal>testCompile</goal></goals></execution></executions></plugin><plugin><groupId>org.apache.maven.plugins</groupId><artifactId>maven-assembly-plugin</artifactId><configuration><archive><manifest></manifest></archive><descriptorRefs><descriptorRef>jar-with-dependencies</descriptorRef></descriptorRefs></configuration></plugin><plugin><groupId>net.alchim31.maven</groupId><artifactId>scala-maven-plugin</artifactId><version>3.2.2</version><executions><execution><!-- 声明绑定到maven的compile阶段 --><goals><goal>compile</goal><goal>testCompile</goal></goals></execution></executions></plugin><!-- 用于项目的打包插件 --><plugin><groupId>org.apache.maven.plugins</groupId><artifactId>maven-assembly-plugin</artifactId><version>3.0.0</version><executions><execution><id>make-assembly</id><phase>package</phase><goals><goal>single</goal></goals></execution></executions></plugin></plugins></build>
```