Flink自定义activemq数据源
使用 Flink 处理实时流式数据，数据源为 ActiveMQ，消费指定队列的数据。定义数据源的类，继承 RichSourceFunction 类，主要是重写 run 方法；cancel 方法是在任务取消的时候修改标志位，使 run 方法里面的循环退出；open 方法可以在初始化连接的时候用到。
以下为代码示例:
package com.jietao.flink.stduy;
import com.alibaba.fastjson.JSONObject;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.command.ActiveMQBytesMessage;
import org.apache.activemq.command.ActiveMQMessage;
import org.apache.activemq.command.ActiveMQTextMessage;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.util.ByteSequenceData;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.rest.ConnectionException;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import sun.security.krb5.Config;
import javax.jms.*;
import javax.swing.text.TabExpander;
import javax.xml.soap.Text;
public class ActiveMQSource extends RichSourceFunction<ActiveMQData> {
private static Configuration _configuration;
private ConnectionFactory connectionFactory;
private Connection connection;
private MessageConsumer messageConsumer;
private Boolean running = true;
public ActiveMQSource(Configuration configuration){
_configuration = configuration;
}
@Override
public void open(Configuration parameters) throws Exception {
String user = _configuration.getString("user", "");
String password = _configuration.getString("password", "");
String host = _configuration.getString("host", "");
connectionFactory = new ActiveMQConnectionFactory(user, password, host);
init();
System.out.println("open .... ");
}
void init(){
try{
connection = connectionFactory.createConnection();
connection.start();
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
Queue queue = session.createQueue("test2");
messageConsumer = session.createConsumer(queue);
}catch (Exception e){
System.out.println("create connection factory fail, " + e.getMessage());
}
}
public void run(final SourceContext<ActiveMQData> sourceContext) throws Exception {
// messageConsumer.setMessageListener(new MessageListener() {
public void onMessage(Message message){
TextMessage textMessage = (TextMessage) message;
try{
String topic = textMessage.getJMSDestination().toString();
String payload = textMessage.getText();
Long timestamp = textMessage.getJMSTimestamp();
sourceContext.collect(new ActiveMQData(topic, payload, timestamp));
}catch (Exception e){
System.out.println("error, " + e.getMessage());
}
}
});
while (running){
try{
//TextMessage textMessage = (TextMessage) messageConsumer.receive();
ActiveMQMessage activeMQMessage = (ActiveMQMessage) messageConsumer.receive();
//ActiveMQTextMessage activeMQTextMessage = (ActiveMQTextMessage) messageConsumer.receive();
//ActiveMQBytesMessage activeMQBytesMessage = (ActiveMQBytesMessage) messageConsumer.receive();
// System.out.println("1122: " + activeMQMessage.getMessage().getOriginalDestination());
// System.out.println("1133: " + activeMQMessage.getContent());
// System.out.println("1144: " + activeMQMessage.getMessage());
ByteSequence byteSequence = activeMQMessage.getContent();
ActiveMQData activeMQData = JSONObject.parseObject(byteSequence.getData(), ActiveMQData.class);
activeMQData.setTopic(activeMQMessage.getMessage().getOriginalDestination().toString());
activeMQData.setSize(activeMQMessage.getContent().getLength());
// sourceContext.collect(new ActiveMQData(topic, payload, timestamp, size));
sourceContext.collect(activeMQData);
}
catch (Exception e){
System.out.println("error, " + e.getMessage());
Thread.sleep(5000);
init();
}
}
}
public void cancel() {
running = false;
try{
connection.close();
}catch (Exception e){
System.out.println("close connection fail, " + e.getMessage());
}
}
}
主入口代码:
import com.jietao.flink.stduy.*;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.functions.FormattingMapper;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
public class Main {
public static void main(String[] args) throws Exception{
StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
environment.setParallelism(2);
// Configuration configuration = new Configuration();
// configuration.setString("tao", "tao");
// DataStreamSource<Sensor> dataStreamSource = environment.addSource(new SensorSource(configuration));
// dataStreamSource.print();
Configuration configuration1 = new Configuration();
configuration1.setString("user", "admin");
configuration1.setString("password", "password");
configuration1.setString("host", "tcp://127.0.0.1:61616");
DataStreamSource<ActiveMQData> dataStreamSource1 = environment.addSource(new ActiveMQSource(configuration1));
dataStreamSource1.map(new MapFunction<ActiveMQData, OutTemaplate> () {
private static final long serialVersionUID = 520662279389510260L;
public OutTemaplate map(ActiveMQData activeMQData) {
System.out.println("ttt: " + activeMQData.toString());
String des = activeMQData.getTopic();
String topic = des.substring(8).replace(".", "/");
OutTemaplate outTemaplate = new OutTemaplate();
outTemaplate.setRf(topic);
outTemaplate.setPayload(activeMQData.getPayload());
return outTemaplate;
}
}).print().setParallelism(2);
environment.execute("test");
}
}