Docker-Compose部署kafka教程
1. 环境
- Ubuntu 20以上
- Docker version 20以上
- Docker-Compose version 1.25以上
（安装方法参见《安装Docker和Docker-Compose教程》）
- Elasticsearch 7.17.4
（部署方法参见《部署Elasticsearch教程》）
2. 创建kafka.conf文件
vim kafka.conf
# Logstash pipeline: consume log events from Kafka and index them
# into a daily Elasticsearch index.
input {
  kafka {
    # Kafka broker ip:port (NOT Elasticsearch — ES listens on :9200 below)
    bootstrap_servers => "10.0.168.217:9092"
    topics => ["developer"]
    group_id => "dev-logs"
    client_id => "developer"
    # Attach Kafka topic/partition/offset metadata to each event
    decorate_events => true
  }
}
output {
  elasticsearch {
    # Static plugin id: sprintf date patterns are not expanded in "id",
    # so a dynamic-looking id is misleading — keep it a plain identifier.
    id => "dev-log"
    # One index per day, e.g. dev-log-2022.06.01
    index => "dev-log-%{+YYYY.MM.dd}"
    # Elasticsearch credentials
    user => "elastic"
    password => "elastic@2022"
    template_overwrite => true
    # Elasticsearch的ip:port
    hosts => ["10.0.168.217:9200"]
  }
}
3. 创建kafka的docker-compose.yml文件
# Zookeeper + Kafka stack for log shipping (consumed by Logstash above).
version: '3'
services:
  zookeeper:
    image: zookeeper
    container_name: pl_zookeeper
    ports:
      # Quote port mappings so YAML never mis-types them
      - "2181:2181"
      - "2888:2888"
      - "3888:3888"
    volumes:
      - /home/data/zookeeper/data:/data
      - /home/data/zookeeper/conf:/conf
    restart: always

  kafka:
    image: wurstmeister/kafka
    depends_on:
      - zookeeper
    container_name: pl_elk_kafka
    ports:
      - "9092:9092"
    environment:
      # Env values quoted: Compose env vars are strings, and unquoted
      # numbers would be parsed as YAML integers.
      KAFKA_BROKER_ID: "0"
      # Zookeeper internal ip:2181 plus /kafka chroot path
      KAFKA_ZOOKEEPER_CONNECT: "10.0.168.217:2181/kafka"
      # PLAINTEXT://<host internal ip>:<port> advertised to clients
      KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT://10.0.168.217:9092"
      KAFKA_LISTENERS: "PLAINTEXT://:9092"
      KAFKA_LOG_DIRS: /data/kafka-data
      KAFKA_LOG_RETENTION_HOURS: "24"
    volumes:
      - /home/data/elk/kafka/data:/data/kafka-data
      # Host timezone, mounted read-only — the container must not write it
      - /etc/localtime:/etc/localtime:ro
    restart: unless-stopped
4. 启动kafka
docker-compose up -d