/// <summary>
/// Starts a background task in which a consumer belonging to the given group
/// consumes messages from the given Kafka topic, inserts each message into the
/// Oracle database, and manually commits the offset after a successful insert.
/// </summary>
/// <param name="broker">Address of the Kafka bootstrap server(s).</param>
/// <param name="topic">Kafka topic to subscribe to.</param>
/// <param name="groupID">Consumer group ID.</param>
/// <exception cref="ArgumentException">
/// Thrown inside the background task when any argument is null, empty, or whitespace.
/// </exception>
public void Consume(string broker, string topic, string groupID)
{
    Task.Run(() =>
    {
        try
        {
            // string.IsNullOrWhiteSpace already covers null, "" and whitespace-only,
            // so the previous IsNullOrEmpty / Length <= 0 checks were redundant.
            if (string.IsNullOrWhiteSpace(broker))
            {
                throw new ArgumentException("Kafka消息服务器的地址不能为空!");
            }
            if (string.IsNullOrWhiteSpace(topic))
            {
                // Was ArgumentNullException(message): that overload treats the string as
                // the *parameter name*, producing a misleading exception text. Use
                // ArgumentException like the sibling checks.
                throw new ArgumentException("Kafka消息所属的主题不能为空!");
            }
            if (string.IsNullOrWhiteSpace(groupID))
            {
                throw new ArgumentException("用户分组ID不能为空!");
            }

            var config = new ConsumerConfig
            {
                GroupId = groupID,
                BootstrapServers = broker,
                SecurityProtocol = SecurityProtocol.SaslPlaintext,
                SaslMechanism = SaslMechanism.Plain,
                StatisticsIntervalMs = 5000,
                EnableAutoCommit = false,                 // offsets are committed manually after a successful DB insert
                HeartbeatIntervalMs = 3000,               // consumer heartbeat interval (Kafka default: 3 s)
                SessionTimeoutMs = 10000,                 // broker session timeout (Kafka default: 10 s)
                MaxPollIntervalMs = 300000,               // max interval between poll calls (Kafka default: 5 min)
                AutoOffsetReset = AutoOffsetReset.Latest, // start from the latest offset when no committed offset exists
                EnablePartitionEof = true,                // Consume() yields an EOF result (Message == null) at partition end
                PartitionAssignmentStrategy = PartitionAssignmentStrategy.CooperativeSticky,
                // SECURITY(review): SASL credentials are hard-coded in source — move them
                // to configuration or a secret store; do not commit credentials.
                SaslUsername = "*****",
                SaslPassword = "***************",
            };

            using (var consumer = new ConsumerBuilder<Ignore, string>(config).Build())
            {
                consumer.Subscribe(topic);

                // NOTE(review): despite the name, IsCancelled == true appears to mean
                // "keep running" here — the catch block below sets it to false to stop
                // the loop. Confirm the flag's intended semantics with its other users.
                while (IsCancelled)
                {
                    WriteLog("KafkaLog", "Kafka Consumer Starting...");
                    OnKafkaEventHandler("Kafka Consumer Starting...");

                    // Blocks until a message arrives or a partition EOF is signaled.
                    var consumerResult = consumer.Consume();

                    if (consumerResult.Message != null)
                    {
                        var message = consumerResult.Message.Value;
                        WriteLog("KafkaLog", "Kafka Consumer Succeed");
                        OnKafkaEventHandler("Kafka Consumer Succeed");

                        // Persist first, commit after — the offset is only advanced once
                        // the message has been stored, so a failed insert is re-consumed.
                        InsertOracleDataBase(message);
                        consumer.Commit(consumerResult);
                        WriteLog("KafkaLog", "Kafka Commit Succeed");
                        OnKafkaEventHandler("Kafka Commit Succeed");
                    }
                    else
                    {
                        // Partition EOF (EnablePartitionEof = true) or an empty result.
                        WriteLog("KafkaLog", "Kafka Consumer NoData");
                        OnKafkaEventHandler("Kafka Consumer NoData");
                    }
                }
            }
        }
        catch (Exception ex)
        {
            // Any failure (bad arguments, broker error, DB insert error) stops the
            // consume loop; the error is logged and surfaced via the event handler.
            IsCancelled = false;
            WriteLog("KafkaLog", ex.Message);
            OnKafkaEventHandler(ex.Message);
        }
    });
}
// Kafka Consumer — first published 2023-07-05 17:16:42
// (Trailing lines were scraped blog-page metadata, not code; kept here as a comment
// so the file remains syntactically valid.)