import org.apache.avro.Schema
import org.apache.avro.Schema.Parser
import org.apache.avro.generic.GenericRecord
// Kafka connection and topic configuration.
val topicName = "test-topic"
val bootstrapServers = "localhost:9092"

// Avro schema definition (as JSON) for a "User" record with two fields:
// a string "name" and an int "age".
val schemaString = """{
"type": "record",
"name": "User",
"fields": [
{"name": "name", "type": "string"},
{"name": "age", "type": "int"}
]
}"""

// Parse the JSON definition into an Avro Schema object.
val schema: Schema = new Parser().parse(schemaString)
// Next, create a Kafka producer with the Avro serializer configured as the value serializer.
import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import io.confluent.kafka.serializers.KafkaAvroSerializer

// Producer configuration: broker address, Schema Registry location, and
// Avro serialization for both keys and values.
val avroSerializerName = classOf[KafkaAvroSerializer].getName
val producerProps = new Properties()
producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers)
producerProps.put("schema.registry.url", "http://localhost:8081")
// NOTE(review): the key type is String but it is serialized with
// KafkaAvroSerializer (as an Avro primitive); confirm this matches the
// consumers' key deserializer configuration.
producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, avroSerializerName)
producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, avroSerializerName)

val producer = new KafkaProducer[String, GenericRecord](producerProps)
// Build the Avro message (a GenericRecord) to send to Kafka.
import org.apache.avro.generic.GenericData.Record
import org.apache.avro.generic.GenericRecord
import org.apache.avro.util.Utf8

// Populate a GenericRecord conforming to the User schema. Fields are set by
// name, so the order of the put calls does not matter.
val userRecord: GenericRecord = new Record(schema)
userRecord.put("age", 25)
userRecord.put("name", new Utf8("Alice"))

// Wrap the record for the target topic; no key is supplied, so the message
// key will be null.
val record = new ProducerRecord[String, GenericRecord](topicName, userRecord)
// Finally, use the producer to send the Avro message to Kafka.
// KafkaProducer.send is asynchronous: it only enqueues the record into the
// producer's internal buffer. Flush to force delivery before the script ends,
// then close the producer to release its network and thread resources --
// without this the buffered message may never reach the broker.
producer.send(record)
producer.flush()
producer.close()