// Benchmark notes (delivery rate = 存储率):
//   count   time(ms)  sleep(ms)  rate(%)
//   200000  953       1000       63.75
//   200000  777       2000       100
package gao.test.testKafka;
import java.util.Properties;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
public class TestMulty {

    // Referenced below but never declared in the original (did not compile).
    // TODO: replace with the real broker list and topic name.
    private static final String serverlist = "localhost:9092";
    private static final String TOPIC_NAME = "test-topic";

    /** Entry point: runs the single producer throughput test. */
    public static void main(String[] args) {
        test02();
    }

    /**
     * Sends 200,000 identical records to Kafka and prints the elapsed
     * milliseconds of the send loop. send() is asynchronous, so the loop time
     * measures enqueue throughput only; flush()/close() afterwards guarantee
     * the buffered records are actually delivered before the method returns
     * (the original relied on a fixed Thread.sleep(2000), which the benchmark
     * notes show was not always long enough — 63.75% delivery at 1s).
     */
    public static void test02() {
        Properties props = new Properties();
        props.put("bootstrap.servers", serverlist);
        props.put("client.id", "SocProducer");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("buffer.memory", "134217728"); // 128M buffer
        props.put("batch.size", "524288");       // 512K batch size
        props.put("linger.ms", "1000");          // wait up to 1s for a batch to fill before sending
        // BUG FIX: the key was "acks " (trailing space), so the config was
        // silently ignored and the broker default applied. acks=1 means the
        // leader must append the record to its local log before the produce
        // request is considered successful.
        props.put("acks", "1");

        // try-with-resources: close() flushes all pending records and releases
        // the producer's resources, replacing the fragile fixed sleep.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(props)) {
            ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC_NAME,
                    "236511685626687498$ori$10942$ori$192.168.1.1$ori$192.168.1.2$ori$13002$ori$null$ori$1620718378877$ori$10$ori$10$ori$null$ori$null$ori$null");
            long start = System.currentTimeMillis();
            for (int i = 0; i < 200000; i++) {
                try {
                    kafkaProducer.send(record);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            System.out.println(System.currentTimeMillis() - start);
            kafkaProducer.flush(); // block until every buffered record is delivered
        }
    }
}