目录
Flink接收Kafka和JSON反序列化
使用ObjectMapper反序列化
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
...
// Round-trip demo: serialize a SecEvent to a JSON string, then parse it back.
ObjectMapper om = new ObjectMapper();
SecEvent event = new SecEvent("eventype1","myname",1233424234,"0001");
String json = om.writeValueAsString(event);
System.out.println(json);
// Constructor signature for reference:
//(String eventType, String imageName, long timestamp, String instanceId)
SecEvent event_out = om.readValue(json, SecEvent.class);
// Both prints should render identically if (de)serialization is lossless.
System.out.println(event.toString());
System.out.println(event_out.toString());
接收Kafka事件消息
package com.practice.kafka;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Objects;
import java.util.Properties;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.TypeExtractor;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
//import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.flink.util.Collector;
/**
 * Flink job that consumes JSON-encoded security events from a Kafka 0.10 topic
 * and prints them. Contains the event POJO and its Jackson-based (de)serialization schema.
 */
public class KafkaSecEvent {

    /** Security event POJO; fields are public so Jackson can (de)serialize without getters. */
    public static class SecEvent {
        public String eventType;
        public String imageName;
        public long timestamp;   // epoch millis — assumed from SimpleDateFormat usage; TODO confirm with producer
        public String instanceId;

        /** No-arg constructor required by Jackson. */
        public SecEvent() {
        }

        public SecEvent(String eventType, String imageName, long timestamp, String instanceId) {
            this.eventType = eventType;
            this.imageName = imageName;
            this.timestamp = timestamp;
            this.instanceId = instanceId;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            SecEvent that = (SecEvent) o;
            // Objects.equals keeps equals() null-safe and consistent with hashCode(),
            // which already tolerates null fields (the original field.equals(...) could NPE).
            return timestamp == that.timestamp
                    && Objects.equals(eventType, that.eventType)
                    && Objects.equals(imageName, that.imageName)
                    && Objects.equals(instanceId, that.instanceId);
        }

        @Override
        public int hashCode() {
            int result = eventType != null ? eventType.hashCode() : 0;
            result = 31 * result + (imageName != null ? imageName.hashCode() : 0);
            result = 31 * result + (int) (timestamp ^ (timestamp >>> 32));
            result = 31 * result + (instanceId != null ? instanceId.hashCode() : 0);
            return result;
        }

        @Override
        public String toString() {
            // SimpleDateFormat is not thread-safe, so a fresh instance is created per call.
            SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
            String formattedTimestamp = format.format(timestamp);
            // Output shape (including the leading-comma line breaks) is kept exactly as before.
            return "{\n" +
                    " \"eventType\": \"" + eventType + "\"\n," +
                    " \"imageName\": \"" + imageName + "\"\n," +
                    " \"timestamp\": \"" + formattedTimestamp + "\"\n," +
                    " \"instanceId\": \"" + instanceId + "\"\n" +
                    "}";
        }
    }

    /**
     * JSON (de)serialization schema for {@link SecEvent} backed by Jackson.
     * Implements both directions so one class serves producers and consumers.
     */
    public static class SecEventJSONSerializer
            implements SerializationSchema<KafkaSecEvent.SecEvent>, DeserializationSchema<KafkaSecEvent.SecEvent> {

        private final ObjectMapper mapper = new ObjectMapper();

        @Override
        public byte[] serialize(KafkaSecEvent.SecEvent secEvent) {
            try {
                return mapper.writeValueAsBytes(secEvent);
            } catch (JsonProcessingException e) {
                // Wrap with context; the SerializationSchema API does not allow checked exceptions.
                throw new RuntimeException("Failed to serialize SecEvent to JSON", e);
            }
        }

        @Override
        public SecEvent deserialize(byte[] bytes) throws IOException {
            // NOTE(review): leftover debug marker — the sample output in the article shows it,
            // so it is retained; remove for production use.
            System.out.println("hello");
            return mapper.readValue(bytes, SecEvent.class);
        }

        @Override
        public boolean isEndOfStream(SecEvent secEvent) {
            return false; // unbounded stream: never terminate based on element content
        }

        @Override
        public TypeInformation<SecEvent> getProducedType() {
            return TypeExtractor.getForClass(SecEvent.class);
        }
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Connect to the Kafka broker and subscribe to the "event" topic.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "192.168.11.45:6669");
        FlinkKafkaConsumer010<SecEvent> consumer =
                new FlinkKafkaConsumer010<>("event", new SecEventJSONSerializer(), properties);
        // Uncomment to replay the topic from the earliest offset.
        //consumer.setStartFromEarliest();
        DataStream<SecEvent> stream = env.addSource(consumer);

        stream.process(new ProcessFunction<SecEvent, String>() {
            @Override
            public void processElement(SecEvent event, Context ctx, Collector<String> out) throws Exception {
                // Emit the formatted event downstream; the println is a local debugging aid.
                out.collect(event.toString());
                System.out.println(event.toString());
            }
        }).print();

        env.execute();
    }
}
Python发送Kafka消息事件
# Publish one JSON-encoded SecEvent to the "event" topic (partition 0).
from kafka import KafkaProducer

BROKERS = ['192.168.11.45:6669']
EVENT_PAYLOAD = b'{"eventType":"eventype1","imageName":"myname", "timestamp":1233424234,"instanceId":"0001"}'

producer = KafkaProducer(bootstrap_servers=BROKERS)
future = producer.send('event', key=b'my_key', value=EVENT_PAYLOAD, partition=0)
测试结果
hello
{
 "eventType": "eventype1"
, "imageName": "myname"
, "timestamp": "1970-01-15 06:37:04.234"
, "instanceId": "0001"
}
{
 "eventType": "eventype1"
, "imageName": "myname"
, "timestamp": "1970-01-15 06:37:04.234"
, "instanceId": "0001"
}
pom.xml配置
由于使用的kafka版本是0.10，因此使用的库也需要是0.10版本对应的。
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<depe