flink kafka stream
一、最重要的:打包 jar 时要使用带依赖的打包方式(jar-with-dependencies),否则部署到集群后会找不到依赖的类。
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
<encoding>UTF-8</encoding>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
<archive>
<manifest>
<!-- 可以设置jar包的入口类(可选) -->
<mainClass>com.sensedeal.warning.FlinkService</mainClass>
</manifest>
</archive>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
二、flink 相关的依赖,打包时 scope 要设为 runtime,否则会与集群中自带的 flink 包发生冲突。
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka_2.11</artifactId>
<version>${flink-version}</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.11</artifactId>
<version>${flink-version}</version>
<scope>runtime</scope>
</dependency>
三、代码:代码中最好用 new 创建对象来使用,不要用工具类里 static 方法那种写法,容易出问题。
// Consume the Kafka topic as plain strings; parameterized type avoids raw-type warnings
// and matches the SimpleStringSchema deserializer.
FlinkKafkaConsumer<String> myConsumer = new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), prop);
// Always re-read from the earliest offset on (re)start.
// NOTE(review): confirm this is intended for production; setStartFromGroupOffsets() is the usual default.
myConsumer.setStartFromEarliest();
DataStream<String> text = env.addSource(myConsumer)
        // DataStream<String> text = env.fromElements(json)  // local-test alternative source
        .setParallelism(1);
// Parse each raw JSON message into a list of MonitorMessageInfo records.
SingleOutputStreamOperator<List<MonitorMessageInfo>> singleOutputStreamOperator = text.process(new MyProcessFunction())
        .setParallelism(2);
// Persist the parsed records to SQL Server.
singleOutputStreamOperator.addSink(new SqlserverSink())
        .setParallelism(2)
        .name("sqlserver add...");
env.execute("warning Streaming From Kafka");
import com.sensedeal.warning.entity.MonitorMessageInfo;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import java.util.ArrayList;
import java.util.List;
/**
 * Turns one raw Kafka JSON message into a {@code List<MonitorMessageInfo>} and emits
 * the whole list downstream as a single element.
 *
 * <p>Fix: the superclass output type must be {@code List<MonitorMessageInfo>} (the original
 * raw {@code List} made {@code processElement}'s {@code Collector<List<MonitorMessageInfo>>}
 * parameter fail to match the inherited abstract method's signature).
 */
public class MyProcessFunction extends ProcessFunction<String, List<MonitorMessageInfo>> {

    /**
     * Parses one message; on parse failure the message is logged and dropped so a single
     * bad record does not fail the whole streaming job (deliberate best-effort behavior).
     *
     * @param s         raw JSON message from Kafka
     * @param context   Flink process context (unused)
     * @param collector receives the parsed list as one downstream element
     */
    @Override
    public void processElement(String s, Context context, Collector<List<MonitorMessageInfo>> collector) throws Exception {
        try {
            // New parser instance per element — intentionally avoids shared static state.
            JsonProcess jsonProcess = new JsonProcess();
            List<MonitorMessageInfo> infoList = jsonProcess.jsonProcess(s);
            System.out.println("-----end...");
            collector.collect(infoList);
        } catch (Exception e) {
            // Best-effort: log and drop the bad message rather than killing the stream.
            System.out.println("------" + e);
            e.printStackTrace();
        }
    }
}
import com.sensedeal.warning.entity.MonitorMessageInfo;
import com.sensedeal.warning.utils.SqlserverJdbcUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.springframework.jdbc.core.JdbcTemplate;
import java.util.List;
/**
 * Flink sink that writes each parsed {@code MonitorMessageInfo} batch to SQL Server
 * via Spring's {@code JdbcTemplate}.
 *
 * <p>Fix: the superclass type parameter must be {@code List<MonitorMessageInfo>} (the
 * original raw {@code List} made {@code invoke}'s typed parameter mismatch the inherited
 * method signature).
 */
public class SqlserverSink extends RichSinkFunction<List<MonitorMessageInfo>> {

    // transient: JdbcTemplate is not serializable and Flink serializes the sink instance
    // when shipping it to task managers; the field is (re)built in open() on each worker.
    private transient JdbcTemplate sqlserverJdbcTemplate;

    /** Initializes the JdbcTemplate once per parallel sink instance, after deserialization. */
    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        sqlserverJdbcTemplate = new JdbcTemplate(SqlserverJdbcUtil.getSource());
    }

    /**
     * Inserts every record of the batch individually.
     *
     * @param value   one parsed batch from the upstream process function
     * @param context sink context (unused)
     */
    @Override
    public void invoke(List<MonitorMessageInfo> value, Context context) throws Exception {
        // Table and column names are anonymized (***) in this snippet; the 12 '?' placeholders
        // must match the real column list — TODO confirm against the actual table definition.
        String sql = "insert into ***(*,*,*) values (?,?,?,?,?,?,?,?,?,?,?,?)";
        for (MonitorMessageInfo info : value) {
            sqlserverJdbcTemplate.update(sql, info.getMonitId(), info.getCompanyCode(), info.getCompanyName(), info.getCreateTime(), info.getMessage(), info.getSpreadUid(), info.getToUser(), info.getSpreadName(), info.getExpress(), info.getNotice(), info.getInfo(), info.getMainId());
        }
    }

    @Override
    public void close() throws Exception {
        super.close();
    }
}
import com.alibaba.druid.pool.DruidDataSourceFactory;
import lombok.extern.slf4j.Slf4j;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
@Slf4j
public class MysqlJdbcUtil {
// Shared pooled DataSource; private and static so the whole JVM reuses one Druid pool.
private static DataSource source = null;

static {
    // Build the pool once at class-load time from the Druid properties file.
    // Fix: the original used fullwidth curly quotes (“mysqlDruid.properties”), which is
    // not a valid Java string literal and does not compile.
    Properties p = new Properties();
    try {
        // p.load(MysqlJdbcUtil.class.getClassLoader().getResourceAsStream("mysqlDruid.properties"));
        p.load(Thread.currentThread().getContextClassLoader().getResourceAsStream("mysqlDruid.properties"));
        source = DruidDataSourceFactory.createDataSource(p);
    } catch (Exception e) {
        // Pool creation failure leaves source == null and callers will later NPE.
        // NOTE(review): consider rethrowing as ExceptionInInitializerError to fail fast.
        e.printStackTrace();
    }
}
//建立连接
// Borrows one connection from the shared Druid pool.
// Returns null when the pool cannot supply a connection (original contract kept).
// NOTE(review): method name typo ("getConnetion") is preserved — renaming would break callers.
public static Connection getConnetion() {
    try {
        return source.getConnection();
    } catch (SQLException e) {
        e.printStackTrace();
        return null;
    }
}
// Exposes the shared pooled DataSource (may be null if the static initializer failed).
public static DataSource getSource(){
return source;
}