package com.fline.kafka.customer;
import java.io.PrintStream;
import java.util.Properties;
import java.util.Random;
import java.util.Scanner;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.common.serialization.StringSerializer;
public class KafkaProducerExample {

    /**
     * Sends a single message to the {@code SX2_ALM_HEARTBEAT} topic over a
     * Kerberos-secured (SASL_PLAINTEXT / GSSAPI) connection and logs the
     * delivery metadata via an async callback.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        // JAAS and krb5 config files required for the GSSAPI (Kerberos) handshake.
        System.setProperty("java.security.auth.login.config", ".\\src\\main\\resources\\kerberos\\kafka-client-jaas.conf");
        System.setProperty("java.security.krb5.conf", ".\\src\\main\\resources\\kerberos\\krb5.conf");

        Properties properties = new Properties();
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        properties.setProperty("sasl.kerberos.service.name", "kafka-server");
        properties.setProperty("sasl.mechanism", "GSSAPI");
        properties.setProperty("security.protocol", "SASL_PLAINTEXT");
        // Configure serializers through the standard config keys instead of the
        // deprecated (and since-removed) ProducerConfig.addSerializerToConfig(...).
        properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // try-with-resources guarantees the producer is flushed and closed, so the
        // asynchronous send() is actually delivered before the JVM exits. The
        // original code never closed the producer and could drop the message.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties)) {
            ProducerRecord<String, String> record = new ProducerRecord<>("SX2_ALM_HEARTBEAT", "msg");
            kafkaProducer.send(record, new Callback() {
                @Override
                public void onCompletion(RecordMetadata metadata, Exception exception) {
                    if (exception != null) {
                        System.err.println("发送数据到kafka中,发生了异常.");
                        exception.printStackTrace();
                        return;
                    }
                    // Log the actual payload (the original printed the literal "msg msg").
                    System.out.println("topic: " + metadata.topic() + " offset: " + metadata.offset()
                            + " partition: " + metadata.partition() + " msg: " + record.value());
                }
            });
        }
    }
}
package com.prq;
import org.apache.kafka.clients.producer.*;
import java.util.Properties;
public class KafkaProducerDemo {

    /**
     * Sends two JSON heartbeat messages to the {@code SX2_ALM_HEARTBEAT} topic
     * and logs the delivery metadata for each via an async callback.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka-server1:9092,kafka-server2:9092,kafka-server3:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        // Wait for the full ISR to acknowledge each record.
        properties.put(ProducerConfig.ACKS_CONFIG, "all");
        properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        properties.put(ProducerConfig.RETRIES_CONFIG, 0);
        properties.put(ProducerConfig.RETRY_BACKOFF_MS_CONFIG, 300);
        properties.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);

        // KafkaProducer is thread-safe; a single instance may be shared by many threads.
        // try-with-resources replaces the bare close() call: the original leaked the
        // producer (and its network threads) if send() threw. close() also flushes
        // any buffered records before returning.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties)) {
            for (int i = 0; i < 2; i++) {
                String msg = "{\"id\":\"abc\",\"station\":[{\"station_name\":\"Z01\",\"subsystem\":[{\"subsystem_name\":\"PSD\",\"device_type\":[{\"device_type_name\":\"HDM\",\"device\":[{\"device_no\":\"1\",\"device_label\":\"HDM-01\",\"device_location\":\"车控室\"}]}]}]}]}";
                ProducerRecord<String, String> record = new ProducerRecord<>("SX2_ALM_HEARTBEAT", msg);
                kafkaProducer.send(record, new Callback() {
                    @Override
                    public void onCompletion(RecordMetadata metadata, Exception exception) {
                        if (exception != null) {
                            System.err.println("发送数据到kafka中,发生了异常.");
                            exception.printStackTrace();
                            return;
                        }
                        System.out.println("topic: " + metadata.topic() + " offset: " + metadata.offset() + " partition: "
                                + metadata.partition() + "msg "+msg);
                    }
                });
            }
            System.out.println("消息发送完成");
        }
    }
}
发送 Kafka 回调(Spring Kafka KafkaTemplate + ListenableFuture 用法示例):
// 也可以直接传入 ProducerRecord: ListenableFuture send = kafkaTemplate.send(new ProducerRecord("", ""));
ListenableFuture future = kafkaTemplate.send(topic, data);
future.addCallback(o -> {
CommonFileLogger.logger.info("send to kafka success " + o);
}, err -> {
CommonFileLogger.logger.error("send to kafka failed", err);
});