1.背景
Spark Streaming 以 direct 方式读取 Kafka 消息，设置 checkpoint 并将结果写入 MySQL。
2.发送数据到kafka
package com.bigdata.kafka;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
/**
 * Demo producer: sends 10,000 string messages ("value_1" .. "value_10000")
 * to the Kafka topic "ss" on broker dn1:9092, throttled to one per second.
 */
public class KafkaSend {

    /**
     * Entry point. Builds a {@link KafkaProducer}, sends the messages, and
     * closes the producer cleanly so buffered records are flushed.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "dn1:9092");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        String topic = "ss"; // target topic

        // try-with-resources guarantees the producer is closed even if
        // send() throws. The no-arg close() blocks until buffered records
        // are delivered; the original close(100, MILLISECONDS) could drop
        // messages still sitting in the producer's buffer.
        try (Producer<String, String> producer = new KafkaProducer<>(props)) {
            for (int i = 1; i <= 10000; i++) {
                String value = "value_" + i;
                ProducerRecord<String, String> msg = new ProducerRecord<>(topic, value);
                producer.send(msg);
                System.out.println(msg);
                try {
                    Thread.sleep(1000); // throttle: one message per second
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and stop sending instead of
                    // swallowing the interruption (original only printed it).
                    Thread.currentThread().interrupt();
                    break;
                }
            }
            System.out.println("send message over.");
        }
    }
}
3.spark streaming 程序消费
package com.