kafka2.2开启审计日志+logstash处理日志

1.kafka开启SASL

1.1 config/server.properties添加如下内容
# Broker listens with SASL over plaintext (no TLS) on localhost only.
listeners=SASL_PLAINTEXT://127.0.0.1:9092
# Inter-broker traffic also uses SASL_PLAINTEXT with the PLAIN mechanism.
security.inter.broker.protocol=SASL_PLAINTEXT
sasl.mechanism.inter.broker.protocol=PLAIN
sasl.enabled.mechanisms=PLAIN
# Enable ACL authorization so allow/deny decisions are produced (and logged by kafka.authorizer.logger).
authorizer.class.name = kafka.security.auth.SimpleAclAuthorizer
# Principals exempt from ACL checks; separated by ';'.
super.users=User:admin;User:alice
1.2 新建kafka server端的配置文件config/kafka_server_jaas.conf
// Server-side JAAS login context. 'username'/'password' are the broker's own
// credentials for inter-broker auth; each user_<name>="<password>" entry
// defines a client account the broker will accept for SASL/PLAIN.
KafkaServer {
    org.apache.kafka.common.security.plain.PlainLoginModule required
    username="admin"
    password="admin"
    user_admin="admin"
    user_alice="alice";
};
1.3 启动kafka的server,VM option加参数:
-Dlog4j.configuration=file:D:\workspace\project_IDEA\kafka-2.2.1\kafka-2.2.1-src\config\log4j.properties
-Djava.security.auth.login.config=D:\workspace\project_IDEA\kafka-2.2.1\kafka-2.2.1-src\config\kafka_server_jaas.conf
1.4 新建kafka client端的配置文件config/kafka_client_jaas.conf
// Client-side JAAS login context: credentials the producer/consumer presents
// to the broker; must match a user_<name> entry in kafka_server_jaas.conf.
KafkaClient {
  org.apache.kafka.common.security.plain.PlainLoginModule required
  username="admin"
  password="admin";
};
1.5 在producer与consumer代码中添加
// Point the JVM at the client JAAS file (system property must be set before the client is created).
System.setProperty("java.security.auth.login.config", "D:\\workspace\\project_IDEA\\kafka-2.2.1\\kafka-2.2.1-src\\config\\kafka_client_jaas.conf"); // set via system property; pass the path to the JAAS config file
        // Tell the Kafka client to authenticate with SASL/PLAIN over plaintext.
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "PLAIN");
1.6 修改config/log4j.properties的审计日志为DEBUG级别
# Access denials are logged at INFO level, change to DEBUG to also log allowed accesses
log4j.logger.kafka.authorizer.logger=DEBUG, authorizerAppender
log4j.additivity.kafka.authorizer.logger=false
1.7 启动生产者与消费者

2.处理kafka-authorizer.log日志

2.1 在linux执行命令:
vmhgfs-fuse .host:/ /mnt/hgfs #如果不设置自动挂载,每次打开虚拟机都要执行这条命令
2.2 新建配置文件/config/kafka.conf
# Tail the Kafka authorizer audit log (shared via a VMware hgfs mount).
input {
	file {
		path => "/mnt/hgfs/share/kafka-authorizer.log"
		# Read the file from the start on first run instead of only new lines.
		start_position => "beginning"
	}
}
# Parse an authorizer log line into date_time / user / operation / ip / topic.
# NOTE(review): the split-on-"=" below assumes the audit line contains exactly
# five "="-separated segments in this order — verify against the actual
# kafka-authorizer.log format for this Kafka version.
filter {
	# Lines mentioning "acls" (ACL listing noise) are discarded entirely.
	if ([message] =~"acls") {
         drop {}
     }
	# Split the raw line on "=" and expose each segment as its own field.
	mutate {
		split => ["message","="]
		add_field =>   {"date_time" => "%{[message][0]}"}
		add_field =>   {"user_array" => "%{[message][1]}"}
		add_field =>   {"operation" => "%{[message][2]}"}
		add_field =>   {"ip" => "%{[message][3]}"}
		add_field =>   {"topic" => "%{[message][4]}"}
	}
	# Each grok below trims its field down to the interesting token,
	# overwriting the raw segment in place.
	grok {
        match => {
			"date_time" => "%{DATESTAMP:date_time}"
		}
		overwrite => ["date_time"]
    }
    grok {
        match => {
        	"user_array" => "%{NOTSPACE:user_array}"
        }
        overwrite => ["user_array"]
    }
    grok {
        match => {
        	"operation" => "%{NOTSPACE:operation}"
        }
        overwrite => ["operation"]
    }
    grok {
        match => {
        	"ip" => "%{IP:ip}"
        }
        overwrite => ["ip"]
    }
    grok {
        match => {
        	"topic" => "%{NOTSPACE:topic}"
        }
        overwrite => ["topic"]
    }
    # user_array holds "User:<name>"; split on ":" and keep the name part,
    # then drop the intermediate fields.
    mutate {
		split => ["user_array",":"]
		add_field =>   {"user" => "%{[user_array][1]}"}
		remove_field => ["user_array"]
		remove_field => ["message"]
	}
}
# Echo every event to stdout, and persist Read/Write/Describe operations to
# MySQL — topic-resource events into log_kafka_topic, consumer-group events
# into log_kafka_group.
# NOTE(review): requires the third-party logstash-output-jdbc plugin, and the
# DB credentials are embedded in the connection string in plain text.
output {
	stdout{}
	if [operation] == "Read" or [operation] == "Write" or [operation] == "Describe" {
		if ([topic] =~"Topic") {
            jdbc {
                driver_jar_path => "/var/local/mysql-connector-java-8.0.13.jar"
                driver_class => "com.mysql.jdbc.Driver"
                connection_string => "jdbc:mysql://10.0.77.136:3306/logstash?user=root&password=123456&serverTimezone=GMT%2B8"
                statement => [ "insert into log_kafka_topic (TIME,user,operation,IP,topic) values (?,?,?,?,?)","%{date_time}","%{user}","%{operation}","%{ip}","%{topic}" ]
			}
     	}
     	if ([topic] =~"Group") {
            jdbc {
                driver_jar_path => "/var/local/mysql-connector-java-8.0.13.jar"
                driver_class => "com.mysql.jdbc.Driver"
                connection_string => "jdbc:mysql://10.0.77.136:3306/logstash?user=root&password=123456&serverTimezone=GMT%2B8"
                statement => [ "insert into log_kafka_group (TIME,user,operation,IP,topic_group) values (?,?,?,?,?)","%{date_time}","%{user}","%{operation}","%{ip}","%{topic}" ]
			}
     	}
	}
}
2.3 log_kafka_topic建表
-- Audit table for topic-level Kafka operations, filled by the Logstash jdbc output.
-- IF NOT EXISTS lets the tutorial script be re-run without erroring;
-- identifiers are backtick-quoted because TIME and USER are MySQL keywords.
CREATE TABLE IF NOT EXISTS `log_kafka_topic`(
	`ID` INT NOT NULL AUTO_INCREMENT,        -- surrogate key
	`TIME` VARCHAR(25) NOT NULL,             -- event timestamp as text (%{date_time})
	`user` VARCHAR(25) NOT NULL,             -- principal name parsed from "User:<name>"
	`operation` VARCHAR(20) NOT NULL,        -- Read / Write / Describe
	`IP` VARCHAR(20) NOT NULL,               -- client address
	`topic` VARCHAR(50) NOT NULL,            -- topic resource string
    PRIMARY KEY (`ID`)
);
2.4 log_kafka_group建表
-- Audit table for consumer-group Kafka operations, filled by the Logstash jdbc output.
-- IF NOT EXISTS lets the tutorial script be re-run without erroring;
-- identifiers are backtick-quoted because TIME and USER are MySQL keywords.
CREATE TABLE IF NOT EXISTS `log_kafka_group`(
	`ID` INT NOT NULL AUTO_INCREMENT,        -- surrogate key
	`TIME` VARCHAR(25) NOT NULL,             -- event timestamp as text (%{date_time})
	`user` VARCHAR(25) NOT NULL,             -- principal name parsed from "User:<name>"
	`operation` VARCHAR(20) NOT NULL,        -- Read / Write / Describe
	`IP` VARCHAR(20) NOT NULL,               -- client address
	`topic_group` VARCHAR(50) NOT NULL,      -- consumer-group resource string
    PRIMARY KEY (`ID`)
);
2.5 清空表
-- Empty both audit tables (keeps the schemas, resets AUTO_INCREMENT).
truncate table log_kafka_topic;
truncate table log_kafka_group;
2.6 启动logstash
./bin/logstash -f ./config/kafka.conf --path.data=/root/logstash/kafka
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值