KAFKA的JAVA使用api

4 篇文章 0 订阅

一、引入KAFKA的jar包

(1)maven项目

在pom.xml里面加入下面的依赖:

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.12</artifactId>
    <version>1.1.1</version>
</dependency>

(2)非maven项目

    只能手动引入包,需要引入 Kafka 安装目录 libs 下的客户端相关 jar 包(原文此处为截图,未能保留)。

二、创建生产者

package com.kps.kafka;

import java.util.Properties;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

/**
 * Minimal Kafka producer wrapper: builds the client configuration from a broker
 * url and sends one String key/value record per {@link #sendMessage} call.
 */
public class KafkaProduce {
	// Instance field. The original declared this static but assigned it in the
	// constructor, so a second instance with a different url silently
	// reconfigured the first.
	private final Properties properties;

	/**
	 * @param url Kafka bootstrap servers, e.g. "host:9092"
	 */
	KafkaProduce(String url) {
		properties = new Properties();
		properties.put("bootstrap.servers", url);
		// acks=1: leader acknowledgement only (same as the client default).
		// Replaces the old Scala-producer key "request.required.acks"; the
		// legacy "producer.type", "serializer.class" and "bak.*" entries the
		// original set are ignored by the Java KafkaProducer and were dropped.
		properties.put("acks", "1");
		properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
	}

	/**
	 * Produces a single message to the given topic.
	 *
	 * @param topic topic to write to
	 * @param key   record key (determines the partition)
	 * @param value record value
	 */
	public void sendMessage(String topic, String key, String value) {
		// One producer per call keeps the original demo behavior; a real
		// application would reuse a single long-lived KafkaProducer.
		KafkaProducer<String, String> kp = new KafkaProducer<String, String>(properties);
		try {
			ProducerRecord<String, String> pr = new ProducerRecord<String, String>(topic, key, value);

			kp.send(pr, new Callback() {
				@Override
				public void onCompletion(RecordMetadata metadata, Exception exception) {
					// BUG FIX: the original read metadata.offset() inside the
					// error branch, where metadata is null/empty — offsets are
					// only meaningful on success.
					if (exception != null) {
						System.out.println("send failed: " + exception.getMessage() + " " + exception);
					} else {
						System.out.println("记录的offset在:" + metadata.offset());
					}
				}
			});
		} finally {
			// close() flushes any pending sends before returning.
			kp.close();
		}
	}
}

三、创建消费者

package com.kps.kafka;

import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;


/**
 * Minimal Kafka consumer wrapper: subscribes to one topic and prints every
 * record it polls, forever.
 */
public class KafkaConsume {
	// Timeout handed to KafkaConsumer.poll(long), in milliseconds. The original
	// called this SIZE, which wrongly suggested a batch size.
	private static final long POLL_TIMEOUT_MS = 100;

	// Instance field. The original declared this static but assigned it in the
	// constructor, so a second instance with a different url silently
	// reconfigured the first.
	private final Properties properties;
	KafkaConsumer<String, String> consumer;

	/**
	 * @param url Kafka bootstrap servers, e.g. "host:9092"
	 */
	KafkaConsume(String url) {
		properties = new Properties();
		properties.put("bootstrap.servers", url);
		properties.put("group.id", "kafkaDemo");
		properties.put("auto.commit.interval.ms", "1000");
		// Start from the beginning of the topic when the group has no
		// committed offset yet.
		properties.put("auto.offset.reset", "earliest");
		properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
		properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
		// NOTE(review): the original also set zookeeper.* and serializer.class
		// entries; the Java KafkaConsumer does not use ZooKeeper and ignores
		// unknown configs (only logging warnings), so they were dropped.
	}

	/**
	 * Subscribes to the topic and prints each polled record. This method never
	 * returns on its own — it polls in an endless loop.
	 *
	 * @param topic topic to consume from
	 */
	public void getMessage(String topic) {
		consumer = new KafkaConsumer<String, String>(properties);
		consumer.subscribe(Collections.singletonList(topic));
		while (true) {
			ConsumerRecords<String, String> records = consumer.poll(POLL_TIMEOUT_MS);
			for (ConsumerRecord<String, String> record : records) {
				System.out.printf("offset = %d, key = %s, value = %s", record.offset(), record.key(), record.value());
				System.out.println();
			}
		}
	}

	/**
	 * Closes the consumer. NOTE(review): KafkaConsumer is not thread-safe, and
	 * getMessage() never exits its loop, so this is effectively unreachable as
	 * written — confirm intended shutdown strategy (e.g. consumer.wakeup()).
	 */
	public void closeConsumer() {
		consumer.close();
	}
}

四、主函数

package com.kps.kafka;

/** Demo entry point: produce one JSON record, then consume the topic. */
public class Main {
    public static void main(String[] args) {
        final String url = "192.168.40.150:9092";
        final String topic = "testTopic";

        // Produce a single JSON message to the topic.
        KafkaProduce producer = new KafkaProduce(url);
        producer.sendMessage(topic, "key", "{\"id\":\"1234312\",\"startDate\":\"20181009\",\"endDate\":\"20181009\",\"custname\":\"张飞\",\"custid\":\"100000001\"}");

        // Consume from the same topic; getMessage polls in an endless loop,
        // so this call never returns.
        KafkaConsume consumer = new KafkaConsume(url);
        consumer.getMessage(topic);
    }
}

五、其他加载生产者和消费者的方式

(1)创建kafka配置文件  kafka.properties

#produce
bootstrap.servers=localhost:9092
producer.type=sync
request.required.acks=1
serializer.class=kafka.serializer.DefaultEncoder
key.serializer=org.apache.kafka.common.serialization.StringSerializer
value.serializer=org.apache.kafka.common.serialization.StringSerializer
bak.partitioner.class=kafka.producer.DefaultPartitioner
bak.key.serializer=org.apache.kafka.common.serialization.StringSerializer
bak.value.serializer=org.apache.kafka.common.serialization.StringSerializer

#consume
zookeeper.connect=localhost:2181  
group.id=kafkaDemo
zookeeper.session.timeout.ms=4000  
zookeeper.sync.time.ms=200  
auto.commit.interval.ms=1000  
auto.offset.reset=earliest  
serializer.class=kafka.serializer.StringEncoder 
key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
value.deserializer=org.apache.kafka.common.serialization.StringDeserializer

(2)创建生产者

package com.kps.kafka;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

/**
 * Kafka producer wrapper configured from a classpath kafka.properties file.
 */
public class KafkaProduce {
	private static final Properties properties = loadProperties();

	/**
	 * Loads kafka.properties from the classpath.
	 *
	 * Fixes three defects in the original static initializer: the InputStream
	 * was never closed (resource leak), a missing resource caused an NPE in
	 * Properties.load(null), and IOException was swallowed with
	 * printStackTrace, leaving an empty — and thus invalid — configuration.
	 *
	 * @throws IllegalStateException if the file is missing or unreadable
	 *         (surfaces as ExceptionInInitializerError on first class use)
	 */
	private static Properties loadProperties() {
		Properties props = new Properties();
		try (InputStream inStream =
				KafkaProduce.class.getClassLoader().getResourceAsStream("kafka.properties")) {
			if (inStream == null) {
				throw new IllegalStateException("kafka.properties not found on classpath");
			}
			props.load(inStream);
		} catch (IOException e) {
			throw new IllegalStateException("failed to load kafka.properties", e);
		}
		return props;
	}

	/**
	 * Produces a single message to the given topic.
	 *
	 * @param topic topic to write to
	 * @param key   record key (determines the partition)
	 * @param value record value
	 */
	public void sendMessage(String topic, String key, String value) {
		// One producer per call keeps the original demo behavior; a real
		// application would reuse a single long-lived KafkaProducer.
		KafkaProducer<String, String> kp = new KafkaProducer<String, String>(properties);
		try {
			ProducerRecord<String, String> pr = new ProducerRecord<String, String>(topic, key, value);

			kp.send(pr, new Callback() {
				@Override
				public void onCompletion(RecordMetadata metadata, Exception exception) {
					// BUG FIX: the original read metadata.offset() inside the
					// error branch, where metadata is null/empty — offsets are
					// only meaningful on success.
					if (exception != null) {
						System.out.println("send failed: " + exception.getMessage() + " " + exception);
					} else {
						System.out.println("记录的offset在:" + metadata.offset());
					}
				}
			});
		} finally {
			// close() flushes any pending sends before returning.
			kp.close();
		}
	}
}

(3)创建消费者

package com.kps.kafka;

import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;


/**
 * Kafka consumer wrapper configured from a classpath kafka.properties file;
 * subscribes to one topic and prints every record it polls, forever.
 */
public class KafkaConsume {
	private static final Properties properties = loadProperties();

	// Timeout handed to KafkaConsumer.poll(long), in milliseconds. The original
	// called this SIZE, which wrongly suggested a batch size.
	private static final long POLL_TIMEOUT_MS = 100;
	KafkaConsumer<String, String> consumer;

	/**
	 * Loads kafka.properties from the classpath.
	 *
	 * Fixes three defects in the original static initializer: the InputStream
	 * was never closed (resource leak), a missing resource caused an NPE in
	 * Properties.load(null), and IOException was swallowed with
	 * printStackTrace, leaving an empty — and thus invalid — configuration.
	 *
	 * @throws IllegalStateException if the file is missing or unreadable
	 *         (surfaces as ExceptionInInitializerError on first class use)
	 */
	private static Properties loadProperties() {
		Properties props = new Properties();
		try (InputStream inStream =
				KafkaConsume.class.getClassLoader().getResourceAsStream("kafka.properties")) {
			if (inStream == null) {
				throw new IllegalStateException("kafka.properties not found on classpath");
			}
			props.load(inStream);
		} catch (IOException e) {
			throw new IllegalStateException("failed to load kafka.properties", e);
		}
		return props;
	}

	/**
	 * Subscribes to the topic and prints each polled record. This method never
	 * returns on its own — it polls in an endless loop.
	 *
	 * @param topic topic to consume from
	 */
	public void getMessage(String topic) {
		consumer = new KafkaConsumer<String, String>(properties);
		consumer.subscribe(Collections.singletonList(topic));
		while (true) {
			ConsumerRecords<String, String> records = consumer.poll(POLL_TIMEOUT_MS);
			for (ConsumerRecord<String, String> record : records) {
				System.out.printf("offset = %d, key = %s, value = %s", record.offset(), record.key(), record.value());
				System.out.println();
			}
		}
	}

	/**
	 * Closes the consumer. NOTE(review): KafkaConsumer is not thread-safe, and
	 * getMessage() never exits its loop, so this is effectively unreachable as
	 * written — confirm intended shutdown strategy (e.g. consumer.wakeup()).
	 */
	public void closeConsumer() {
		consumer.close();
	}
}

(4)主函数

package com.kps.kafka;

/** Demo entry point for the properties-file variant: produce one JSON record, then consume. */
public class Main {
    public static void main(String[] args) {
        final String topic = "testTopic";

        // Produce a single JSON message to the topic.
        KafkaProduce producer = new KafkaProduce();
        producer.sendMessage(topic, "key", "{\"id\":\"1234312\",\"startDate\":\"20181009\",\"endDate\":\"20181009\",\"custname\":\"张飞\",\"custid\":\"100000001\"}");

        // Consume from the same topic; getMessage polls in an endless loop,
        // so this call never returns.
        KafkaConsume consumer = new KafkaConsume();
        consumer.getMessage(topic);
    }
}

 

  • 2
    点赞
  • 9
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值