第一步:创建目录和配置文件
mkdir -p /data/server/nginx/conf/
vim /data/server/nginx/conf/nginx.conf
--------------------------------------nginx.conf-------------------------
worker_processes 1;
events {
worker_connections 1024;
}
http {
include mime.types;
default_type application/octet-stream;
# Custom access-log format; the field order must match the grok pattern
# configured in Logstash so each line parses cleanly.
log_format logstash '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for" '
'"$http_host" "$upstream_addr" '
'$request_time $upstream_response_time';
access_log /var/log/nginx/access.log logstash; # write the access log using the "logstash" format above
error_log /var/log/nginx/error.log;
server {
server_name localhost;
listen 80;
charset utf-8;
location / {
root /usr/share/nginx/html;
index index.html index.htm;
}
}
}
------------------------------------------------------------------------
第二步:创建目录并授权
mkdir -p /data/server/nginx/logs
touch /data/server/nginx/logs/access.log
chmod 777 /data/server/nginx/logs/access.log
第三步:拉取nginx镜像并执行
docker pull nginx
docker run --name some-nginx -p 80:80 --add-host www.github01.com:192.168.20.38 -v /data/server/nginx/conf/nginx.conf:/etc/nginx/nginx.conf:ro -v /data/server/nginx/logs/access.log:/var/log/nginx/access.log -d nginx
第4步:拉取es镜像并执行,并放通iptables
docker run -d --name myes -p 9200:9200 -p 9300:9300 --add-host www.github.com:192.168.20.38 elasticsearch
iptables -I INPUT 2 -p tcp -m state --state NEW --dport 9200 -j ACCEPT
第5步:拉取kibana镜像并执行
docker run --name mykibana -e ELASTICSEARCH_URL=http://www.github.com:9200 -p 5601:5601 --add-host www.github.com:192.168.20.38 -d kibana
第6步:拉取zookeeper镜像、kafka镜像,并执行
docker pull wurstmeister/zookeeper
docker pull wurstmeister/kafka
docker run --name zookeeper -p 2181:2181 -t -d wurstmeister/zookeeper
docker run --name kafka -e HOST_IP=192.168.20.38 -e KAFKA_ADVERTISED_PORT=9092 -e KAFKA_BROKER_ID=1 -e ZK=zk -p 9092:9092 --link zookeeper:zk -t -d wurstmeister/kafka
第7步:进入kafka,创建topic
docker exec -it <kafka容器ID> /bin/bash
bin/kafka-topics.sh --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 1 --topic test
bin/kafka-topics.sh --list --zookeeper zookeeper:2181
bin/kafka-console-producer.sh --broker-list localhost:9092 --topic test
bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test --from-beginning
第8步:创建目录,写配置文件,
mkdir -p /data/server/logstash/conf/
vim logstash.conf
----------------------------logstash.conf------------------------------
# Pipeline: tail the nginx access log on disk and index each event
# directly into Elasticsearch (no Kafka in between).
input {
file {
path => ["/data/server/nginx/logs/access.log"]
type => "nginx_log"
start_position => "beginning"
}
}
#filter {
#}
output {
elasticsearch {
hosts => ["www.github.com:9200"]
index => "logstash-nginx"
}
#stdout {
#codec => rubydebug
# }
}
-------------------------logstash.conf ------------------------------------------
--------------------------------------------------------------------------------
# Pipeline (producer side): tail the nginx access log and publish each
# event to the Kafka topic "test".
input {
file {
path => ["/data/server/nginx/logs/access.log"]
type => "nginx_log"
start_position => "beginning"
}
}
#filter {
#}
output {
kafka {
topic_id => "test"
bootstrap_servers => "192.168.20.38:9092"
}
}
--------------------------------------------------------------------------------
---------------------------------------------------------------------------------
# Pipeline (consumer side): read events from the Kafka topic "test"
# (discovered via ZooKeeper) and index them into Elasticsearch.
# NOTE(review): zk_connect / topic_id / reset_beginning are options of the
# legacy kafka input plugin (Logstash <= 2.x); newer plugin versions use
# bootstrap_servers / topics instead -- confirm against the logstash image version.
input {
kafka {
zk_connect => "192.168.20.38:2181"
#group_id => ""
topic_id => "test"
codec => "plain"
reset_beginning => false
consumer_threads => 5
decorate_events => true
#add_field => { "[@metadata][type]" => "syslog" }
}
}
#filter {
#}
output {
elasticsearch {
hosts => ["www.github.com:9200"]
index => "logstash-nginx"
}
#stdout {
#codec => rubydebug
# }
}
----------------------------------------------------------------------------------
第9步,执行镜像:
#docker run -it -d --name=my_logstash --add-host www.github.com:192.168.20.38 -v /data/server/logstash/conf:/config-dir -v /data/server/nginx/logs/access.log:/data/server/nginx/logs/access.log logstash -f /config-dir/logstash.conf
docker run -it -d --name=my_logstash --add-host www.github.com:192.168.20.38 --link kafka -v /data/server/logstash/conf:/config-dir -v /data/server/nginx/logs/access.log:/data/server/nginx/logs/access.log logstash -f /config-dir/logstash.conf
docker run -it -d --name=my_kafka_logstash --add-host www.github.com:192.168.20.38 --link kafka -v /data/server/logstash/kafka_conf:/config-dir logstash -f /config-dir/logstash.conf
mkdir -p /data/server/nginx/conf/
vim /data/server/nginx/conf/nginx.conf
--------------------------------------nginx.conf-------------------------
worker_processes 1;
events {
worker_connections 1024;
}
http {
include mime.types;
default_type application/octet-stream;
# Custom access-log format; the field order must match the grok pattern
# configured in Logstash so each line parses cleanly.
log_format logstash '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for" '
'"$http_host" "$upstream_addr" '
'$request_time $upstream_response_time';
access_log /var/log/nginx/access.log logstash; # write the access log using the "logstash" format above
error_log /var/log/nginx/error.log;
server {
server_name localhost;
listen 80;
charset utf-8;
location / {
root /usr/share/nginx/html;
index index.html index.htm;
}
}
}
------------------------------------------------------------------------
第二步:创建目录并授权
mkdir -p /data/server/nginx/logs
touch /data/server/nginx/logs/access.log
chmod 777 /data/server/nginx/logs/access.log
第三步:拉取nginx镜像并执行
docker pull nginx
docker run --name some-nginx -p 80:80 --add-host www.github01.com:192.168.20.38 -v /data/server/nginx/conf/nginx.conf:/etc/nginx/nginx.conf:ro -v /data/server/nginx/logs/access.log:/var/log/nginx/access.log -d nginx
第4步:拉取es镜像并执行,并放通iptables
docker run -d --name myes -p 9200:9200 -p 9300:9300 --add-host www.github.com:192.168.20.38 elasticsearch
iptables -I INPUT 2 -p tcp -m state --state NEW --dport 9200 -j ACCEPT
第5步:拉取kibana镜像并执行
docker run --name mykibana -e ELASTICSEARCH_URL=http://www.github.com:9200 -p 5601:5601 --add-host www.github.com:192.168.20.38 -d kibana
第6步:拉取zookeeper镜像、kafka镜像,并执行
docker pull wurstmeister/zookeeper
docker pull wurstmeister/kafka
docker run --name zookeeper -p 2181:2181 -t -d wurstmeister/zookeeper
docker run --name kafka -e HOST_IP=192.168.20.38 -e KAFKA_ADVERTISED_PORT=9092 -e KAFKA_BROKER_ID=1 -e ZK=zk -p 9092:9092 --link zookeeper:zk -t -d wurstmeister/kafka
第7步:进入kafka,创建topic
docker exec -it <kafka容器ID> /bin/bash
bin/kafka-topics.sh --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 1 --topic test
bin/kafka-topics.sh --list --zookeeper zookeeper:2181
bin/kafka-console-producer.sh --broker-list localhost:9092 --topic test
bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test --from-beginning
第8步:创建目录,写配置文件,
mkdir -p /data/server/logstash/conf/
vim logstash.conf
----------------------------logstash.conf------------------------------
# Pipeline: tail the nginx access log on disk and index each event
# directly into Elasticsearch (no Kafka in between).
input {
file {
path => ["/data/server/nginx/logs/access.log"]
type => "nginx_log"
start_position => "beginning"
}
}
#filter {
#}
output {
elasticsearch {
hosts => ["www.github.com:9200"]
index => "logstash-nginx"
}
#stdout {
#codec => rubydebug
# }
}
-------------------------logstash.conf ------------------------------------------
--------------------------------------------------------------------------------
# Pipeline (producer side): tail the nginx access log and publish each
# event to the Kafka topic "test".
input {
file {
path => ["/data/server/nginx/logs/access.log"]
type => "nginx_log"
start_position => "beginning"
}
}
#filter {
#}
output {
kafka {
topic_id => "test"
bootstrap_servers => "192.168.20.38:9092"
}
}
--------------------------------------------------------------------------------
---------------------------------------------------------------------------------
# Pipeline (consumer side): read events from the Kafka topic "test"
# (discovered via ZooKeeper) and index them into Elasticsearch.
# NOTE(review): zk_connect / topic_id / reset_beginning are options of the
# legacy kafka input plugin (Logstash <= 2.x); newer plugin versions use
# bootstrap_servers / topics instead -- confirm against the logstash image version.
input {
kafka {
zk_connect => "192.168.20.38:2181"
#group_id => ""
topic_id => "test"
codec => "plain"
reset_beginning => false
consumer_threads => 5
decorate_events => true
#add_field => { "[@metadata][type]" => "syslog" }
}
}
#filter {
#}
output {
elasticsearch {
hosts => ["www.github.com:9200"]
index => "logstash-nginx"
}
#stdout {
#codec => rubydebug
# }
}
----------------------------------------------------------------------------------
第9步,执行镜像:
#docker run -it -d --name=my_logstash --add-host www.github.com:192.168.20.38 -v /data/server/logstash/conf:/config-dir -v /data/server/nginx/logs/access.log:/data/server/nginx/logs/access.log logstash -f /config-dir/logstash.conf
docker run -it -d --name=my_logstash --add-host www.github.com:192.168.20.38 --link kafka -v /data/server/logstash/conf:/config-dir -v /data/server/nginx/logs/access.log:/data/server/nginx/logs/access.log logstash -f /config-dir/logstash.conf
docker run -it -d --name=my_kafka_logstash --add-host www.github.com:192.168.20.38 --link kafka -v /data/server/logstash/kafka_conf:/config-dir logstash -f /config-dir/logstash.conf