问题:项目想用SSL来链接kafka,在使用的过程中遇到几个特别的坑
现象:
程序在消费和生产的时候出现错误:
2019/04/02 20:02:22 unable to create kafka client: "kafka: client has run out of available brokers to talk to (Is your cluster reachable?)"
然后看kafka里面出现错误是:
[2019-04-02 22:27:24,378] WARN Failed to send SSL Close message (org.apache.kafka.common.network.SslTransportLayer)
java.io.IOException: 断开的管道
at sun.nio.ch.FileDispatcherImpl.write0(Native Method)
at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:47)
at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:93)
at sun.nio.ch.IOUtil.write(IOUtil.java:65)
at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:471)
at org.apache.kafka.common.network.SslTransportLayer.flush(SslTransportLayer.java:212)
at org.apache.kafka.common.network.SslTransportLayer.close(SslTransportLayer.java:175)
at org.apache.kafka.common.utils.Utils.closeAll(Utils.java:703)
at org.apache.kafka.common.network.KafkaChannel.close(KafkaChannel.java:61)
at org.apache.kafka.common.network.Selector.doClose(Selector.java:739)
at org.apache.kafka.common.network.Selector.close(Selector.java:727)
at org.apache.kafka.common.network.Selector.pollSelectionKeys(Selector.java:520)
at org.apache.kafka.common.network.Selector.poll(Selector.java:412)
at kafka.network.Processor.poll(SocketServer.scala:551)
at kafka.network.Processor.run(SocketServer.scala:468)
at java.lang.Thread.run(Thread.java:748)
出现这个的原因是我的证书是自己签名的,客户端验证服务器证书时通不过,所以会出现这个问题。我们的修改是:在客户端的TLS配置中跳过对服务器证书的验证。
参数讲解:
设置验证证书InsecureSkipVerify: false, 不验证证书InsecureSkipVerify: true
验证内容:1. 证书中的CN名称与所连接的主机名是否一致;2. 证书是否由受信任的CA签发,不是的话就会出现 certificate signed by unknown authority 错误。
自建(自签名)证书无法通过上述验证。
给客户端TLS配置加上 InsecureSkipVerify: true(跳过服务器证书验证)之后就可以连接成功了!
代码实现过程:
- 生产和消费
package util
import (
"log"
"github.com/Shopify/sarama"
"crypto/tls"
"io/ioutil"
"crypto/x509"
"os"
"os/signal"
"sync"
"fmt"
"time"
)
// KafkaConsumer connects to the given brokers over TLS using the client
// certificate pair client.cer.pem/client.key.pem and the CA server.cer.pem,
// then consumes every partition of topics until the process is interrupted.
// The broker certificate is self-signed, so server verification is skipped.
func KafkaConsumer(addrs []string, topics string) {
	tlsConfig, err := NewTLSConfig("client.cer.pem",
		"client.key.pem", "server.cer.pem")
	if err != nil {
		log.Fatal(err)
	}
	// Self-signed broker certificate: do not verify the server.
	tlsConfig.InsecureSkipVerify = true
	// Build a configuration with TLS enabled on the network layer.
	consumerConfig := sarama.NewConfig()
	consumerConfig.Net.TLS.Enable = true
	consumerConfig.Net.TLS.Config = tlsConfig
	client, err := sarama.NewClient(addrs, consumerConfig)
	if err != nil {
		log.Fatalf("unable to create kafka client: %q", err)
	}
	// BUG FIX: NewConsumerFromClient does not take ownership of the client;
	// closing the consumer leaves the client (and its connections) open.
	// Deferred before consumer.Close so it runs after it (LIFO).
	defer client.Close()
	consumer, err := sarama.NewConsumerFromClient(client)
	if err != nil {
		log.Fatal(err)
	}
	defer consumer.Close()
	consumerLoop(consumer, topics)
}
// NewTLSConfig builds a *tls.Config for mutual-TLS connections from a PEM
// client certificate/key pair and a PEM CA certificate.
//
// It returns (nil, err) on any failure: the original returned a partially
// populated config alongside the error, which is easy to misuse. It also
// now reports an error when the CA file contains no parseable certificate
// (AppendCertsFromPEM's return value was previously ignored, leaving an
// empty root pool that silently rejects every server). The deprecated
// BuildNameToCertificate call was dropped; crypto/tls selects the leaf
// certificate automatically since Go 1.14.
func NewTLSConfig(clientCertFile, clientKeyFile, caCertFile string) (*tls.Config, error) {
	// Load the client certificate used to authenticate to the broker.
	cert, err := tls.LoadX509KeyPair(clientCertFile, clientKeyFile)
	if err != nil {
		return nil, err
	}
	// Load the CA certificate used to verify the broker.
	caCert, err := ioutil.ReadFile(caCertFile)
	if err != nil {
		return nil, err
	}
	caCertPool := x509.NewCertPool()
	if !caCertPool.AppendCertsFromPEM(caCert) {
		return nil, fmt.Errorf("no valid CA certificates found in %q", caCertFile)
	}
	return &tls.Config{
		Certificates: []tls.Certificate{cert},
		RootCAs:      caCertPool,
	}, nil
}
// consumerLoop starts one goroutine per partition of topic and blocks until
// all of them finish (each stops when an os.Interrupt arrives on signals).
func consumerLoop(consumer sarama.Consumer, topic string) {
	partitions, err := consumer.Partitions(topic)
	if err != nil {
		log.Println("unable to fetch partition IDs for the topic", topic, err)
		return
	}
	signals := make(chan os.Signal, 1)
	signal.Notify(signals, os.Interrupt)
	var wg sync.WaitGroup
	// BUG FIX: the original wrote `for partition := range partitions`, which
	// iterates slice INDICES rather than the partition IDs returned by the
	// broker (only correct by accident when IDs are 0..n-1), and the goroutine
	// closure captured the shared loop variable, so goroutines could all
	// observe the final value. Pass the ID as an argument instead.
	for _, partition := range partitions {
		wg.Add(1)
		go func(p int32) {
			defer wg.Done()
			consumePartition(consumer, p, signals, topic)
		}(partition)
	}
	wg.Wait()
}
// consumePartition consumes a single partition of topic from the oldest
// available offset, logging each message, until a signal arrives on signals.
func consumePartition(consumer sarama.Consumer, partition int32, signals chan os.Signal, topic string) {
	// BUG FIX: log message typo "Receving" -> "Receiving".
	log.Println("Receiving on partition", partition)
	// OffsetOldest replays the partition from the beginning; use
	// sarama.OffsetNewest to only see messages produced after startup.
	partitionConsumer, err := consumer.ConsumePartition(topic, partition, sarama.OffsetOldest)
	if err != nil {
		log.Println(err)
		return
	}
	defer func() {
		if err := partitionConsumer.Close(); err != nil {
			log.Println(err)
		}
	}()
	consumed := 0
ConsumerLoop:
	for {
		select {
		case msg := <-partitionConsumer.Messages():
			log.Printf("Consumed message offset %d\nData: %s\n", msg.Offset, msg.Value)
			consumed++
		case <-signals:
			// Interrupt received: stop consuming and report the count.
			break ConsumerLoop
		}
	}
	log.Printf("Consumed: %d\n", consumed)
}
// KafkaProducer connects to the given brokers over TLS (mutual auth with a
// self-signed CA, so server-certificate verification is skipped) and
// publishes sendValue to topic, waiting for the broker's acknowledgement.
func KafkaProducer(addres []string, topic string, sendValue string) {
	// BUG FIX: was Println("producer_test\n"), which printed a spurious
	// blank line because Println appends its own newline.
	fmt.Println("producer_test")
	config := sarama.NewConfig()
	// Load the client cert/key pair and the CA that signed the broker cert.
	tlsConfig, err := NewTLSConfig("client.cer.pem",
		"client.key.pem", "server.cer.pem")
	if err != nil {
		log.Fatal(err)
	}
	// Self-signed broker certificate: do not verify the server.
	tlsConfig.InsecureSkipVerify = true
	config.Net.TLS.Enable = true
	config.Net.TLS.Config = tlsConfig
	// Deliver per-message acks on the Successes channel. These MUST be
	// drained (loopProducer does so) or the producer eventually deadlocks.
	// NOTE(review): Producer.RequiredAcks is left at its default
	// (WaitForLocal) here — confirm whether WaitForAll is wanted, as in
	// SaramaProducer.
	config.Producer.Return.Successes = true
	config.Producer.Timeout = 5 * time.Second
	client, err := sarama.NewClient(addres, config)
	if err != nil {
		log.Fatalf("unable to create kafka client: %q", err)
	}
	// BUG FIX: NewAsyncProducerFromClient does not take ownership of the
	// client; close it ourselves after the producer (LIFO defer order).
	defer client.Close()
	p, err := sarama.NewAsyncProducerFromClient(client)
	if err != nil {
		log.Fatal(err)
	}
	defer p.Close()
	loopProducer(p, topic, sendValue)
}
// loopProducer publishes one message to topic and waits for the broker's
// response before returning.
//
// BUG FIX: the producer is configured with Producer.Return.Successes = true,
// so the Successes/Errors channels must be drained — the original sent the
// message and returned without reading either, which sarama documents as a
// deadlock, and it logged "Produced" before any acknowledgement arrived.
func loopProducer(producer sarama.AsyncProducer, topic string, text string) {
	producer.Input() <- &sarama.ProducerMessage{Topic: topic, Key: nil, Value: sarama.StringEncoder(text)}
	select {
	case <-producer.Successes():
		log.Printf("Produced message: [%s]\n", text)
	case err := <-producer.Errors():
		log.Println("failed to produce message:", err)
	}
}
// SaramaProducer is a plaintext (non-TLS) demo producer: it connects to the
// given brokers and publishes a timestamped message to the "zhang" topic
// every 500ms, forever (it never returns on the happy path).
func SaramaProducer(addres []string) {
	cfg := sarama.NewConfig()
	// Wait until every in-sync replica has persisted the message.
	cfg.Producer.RequiredAcks = sarama.WaitForAll
	// Choose a random partition for each message.
	cfg.Producer.Partitioner = sarama.NewRandomPartitioner
	// Deliver acks and failures on the Successes/Errors channels; both are
	// drained by the goroutine below, otherwise the producer would block.
	cfg.Producer.Return.Successes = true
	cfg.Producer.Return.Errors = true
	// Broker protocol version. Below V0_10_0_0 message timestamps have no
	// effect (must match on both producer and consumer); a wrong version
	// makes the broker return confusing errors and sends fail.
	cfg.Version = sarama.V0_10_2_1
	fmt.Println("start make producer")
	producer, err := sarama.NewAsyncProducer(addres, cfg)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer producer.AsyncClose()
	fmt.Println("start goroutine")
	// Drain acknowledgements in the background so Input never backs up.
	go func(p sarama.AsyncProducer) {
		for {
			select {
			case <-p.Successes():
				// Delivery confirmed; nothing to report.
			case failure := <-p.Errors():
				fmt.Println("err: ", failure.Err)
			}
		}
	}(producer)
	for counter := 0; ; counter++ {
		time.Sleep(500 * time.Millisecond)
		now := time.Now()
		payload := "this is a message 0606 " + now.Format("15:04:05")
		// A fresh ProducerMessage is required on every iteration — reusing
		// one would make every batched send carry identical contents.
		msg := &sarama.ProducerMessage{
			Topic: "zhang",
		}
		msg.Value = sarama.ByteEncoder(payload)
		producer.Input() <- msg
	}
}
- 测试
package test
import (
"testing"
"copCRoad/util"
)
// Test fixtures: broker address list, target topic, and the payload to send.
var (
	Address  = []string{"IP:6666"}
	topic    = "zhang"
	srcValue = "恭喜你用SSL已经连上kafka啦"
)
// TestKafkaProducer exercises the TLS producer path end to end against a
// live broker (integration test — requires Address to be reachable).
// BUG FIX: renamed from the misspelled "TestKafkaProducter"; test functions
// are discovered by the framework, so nothing references the old name.
func TestKafkaProducer(t *testing.T) {
	util.KafkaProducer(Address, topic, srcValue)
}
// TestKafkaConsumer exercises the TLS consumer path end to end against a
// live broker (integration test — requires Address to be reachable; blocks
// until the process receives an interrupt).
func TestKafkaConsumer(t *testing.T) {
util.KafkaConsumer(Address,topic)
}
总结:
代码还是不够精简,后面再进行优化!以上代码已经验证没有问题,如果有问题多半是你配置的证书有问题,可以参考我另一篇博客:https://blog.csdn.net/weixin_36771703/article/details/88762831;送人玫瑰,手留余香!
参考:https://blog.csdn.net/kdpujie/article/details/79093595
https://blog.csdn.net/wangshubo1989/article/details/77508738