ELK单机环境测试

主机环境为:

centos 6.8 X64

各软件及版本:

kibana-4.5.3-linux-x64.tar.gz
elasticsearch-2.3.4.rpm
logstash-2.3.2.tar.gz

filebeat-1.2.3-x86_64.rpm
jdk-8u121-linux-x64.rpm

下载地址:

http://pan.baidu.com/s/1pLGzoYR

 

需要的主机及安装到的软件:

192.168.40.83   iptables2
kibana-4.5.3-linux-x64.tar.gz
elasticsearch-2.3.4.rpm
logstash-2.3.2.tar.gz
jdk-8u121-linux-x64.rpm

192.168.40.103  test2
filebeat-1.2.3-x86_64.rpm
192.168.40.101  test1
filebeat-1.2.3-x86_64.rpm

elk日志收集架构

image

这个是官网的

https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-overview.html

filebeat收集到的日志可以发送到Elasticsearch/logstash/Kafka/redis中。本实验中收集的日志发送到logstash,logstash通过filter段处理后发送到Elasticsearch,用户通过访问kibana进行数据展示。这里不进行filebeat、logstash、Elasticsearch、kibana的原理讲解

image

iptables2
1.准备好jdk
yum localinstall -y jdk-8u121-linux-x64.rpm
[root@iptables2 ~]# cat /etc/profile.d/java.sh
export JAVA_HOME=/usr/java/latest
export PATH=/usr/java/latest/bin:${PATH}

2.安装Elasticsearch
[root@iptables2 ~]# yum localinstall -y elasticsearch-2.3.4.rpm
安装插件
使用ES自带的命令plugin 
# head
/usr/share/elasticsearch/bin/plugin install mobz/elasticsearch-head
# kopf
/usr/share/elasticsearch/bin/plugin install lmenezes/elasticsearch-kopf
# bigdesk
/usr/share/elasticsearch/bin/plugin install hlstudio/bigdesk

那如何访问安装好的插件呢?
http://ES_server_ip:port/_plugin/plugin_name
Example:
http://192.168.40.83:9200/_plugin/head/
http://192.168.40.83:9200/_plugin/kopf/
编辑配置文件
[root@iptables2 ~]# vim /etc/elasticsearch/elasticsearch.yml
# ---------------------------------- Network -----------------------------------
#
# Set the bind address to a specific IP (IPv4 or IPv6):
#
network.host: 0.0.0.0
#
# Set a custom port for HTTP:
#
http.port: 9200

启动Elasticsearch
[root@iptables2 ~]# service elasticsearch start

安装logstash
tar xf logstash-2.3.2.tar.gz
wget http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz
mkdir logstash-2.3.2/conf
gunzip GeoLiteCity.dat.gz
cat logstash-2.3.2/conf/ver11.conf
input {
    beats {
        port => 5044
        type => "syslog"
    }
}

filter {
    if [type] == "messages" {
        grok {
            match => [ "message", "%{SYSLOGLINE}" ]
            overwrite => [ "message" ]
        }
    }
    date {
        match => [ "timestamp", "MMM dd HH:mm:ss", "MMM  d HH:mm:ss" ]
    }
    if [type] == "nginxacclog" {
        grok {
            match => {
                "message" => "%{IP:client} - (?:%{USERNAME:remote_user}|-) \[%{HTTPDATE:timestamp}\] \"%{WORD:method} %{URIPATHPARAM:request} HTTP/%{NUMBER:http_version}\" \"%{NUMBER:request_time:float}\" %{INT:status} %{NUMBER:bytes} \"(?:%{URI:referer}|-)\" \"(?:%{GREEDYDATA:user_agent}|-)\" (?:%{IP:x_forword_for}|-)"
            }
        }
        date {
            match => [ "timestamp","dd/MMM/YYYY:HH:mm:ss Z" ]
        }
        urldecode {
            all_fields => true
        }
    }
    if [type] == "test1log" {
        grok {
            patterns_dir => "/root/logstash-2.3.2/patterns/"
            match => {
                "message" => "%{IP:client} - - \[%{ELKTIMES:log_timestamp} \] \"%{WORD:method} %{URIPATHPARAM:request} HTTP/%{NUMBER:http_version}\" %{INT:status} %{NUMBER:bytes} \"(?:%{URI:referer}|-)\" \"(?:%{GREEDYDATA:user_agent}|-)\""
            }
        }
        date {
            match => [ "log_timestamp","dd/MMM/YYYY:HH:mm:ss" ]
        }
        geoip {
            source => "client"
            target => "geoip"
            database => "/root/GeoLiteCity.dat"
            add_field => ["[geoip][coordinates]","%{[geoip][longitude]}"]
            add_field => ["[geoip][coordinates]","%{[geoip][latitude]}"]
        }
        mutate {
            convert => ["[geoip][coordinates]","float", "bytes","integer", "bytes.raw","integer"]
        }
        urldecode {
            all_fields => true
        }
    }
    if [type] == "loginmsg" {
        grok {
            match => {"message" => "%{SYSLOGPAMSESSION}"}
            match => {"message" => "%{SECURELOG}"}
            match => {"message" => "%{SYSLOGBASE2}"}
        }
        geoip {
            source => "IP"
            fields => ["city_name"]
            database => "/root/GeoLiteCity.dat"
        }
        if ([status] == "Accepted") {
            mutate {
                add_tag => ["Success"]
            }
        }
        else if ([status] == "Failed") {
            mutate {
                add_tag => ["Failed"]
            }
        }
    }
}

output {
    stdout {
        codec => rubydebug
    }
    elasticsearch {
        hosts => "192.168.40.83:9200"
    }
}
mkdir logstash-2.3.2/patterns
cat logstash-2.3.2/patterns/linux-syslog
SECURELOG %{WORD:program}\[%{DATA:pid}\]: %{WORD:status} password for (?:invalid user )?%{WORD:USER} from %{DATA:IP} port
ELKTIMES %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME}
启动logstash
./logstash-2.3.2/bin/logstash -f logstash-2.3.2/conf/ver11.conf

安装kibana
tar xf /usr/local/src/kibana-4.5.3-linux-x64.tar.gz -C /usr/local
cd /usr/local/
ln -s kibana-4.5.3-linux-x64 kibana
cp kibana/config/kibana.yml kibana/config/kibana.yml.bak_$(date +%F_%H:%M)
配置kibana.yml
server.port: 5601
server.host: "0.0.0.0"
其它内容不动
启动kibana
./kibana/bin/kibana
访问
http://192.168.40.83:5601/即可

test1
安装filebeat
yum localinstall -y filebeat-1.2.3-x86_64.rpm
cp /etc/filebeat/filebeat.yml{,.bak_$(date +%F_%H:%M)}
# cat /etc/filebeat/filebeat.yml
##################################################### filebeat #######################################################
filebeat:
  prospectors:
    -
      paths:
        - /var/log/messages
      input_type: log
      document_type: messages

    -
      paths:
        - /var/log/secure
      input_type: log
      document_type: loginmsg

    -
      paths:
        - /var/log/nginx_access.log 
      input_type: log
      document_type: nginxacclog

    -
      paths:
        - /usr/local/tomcat/logs/catalina.out
      input_type: log
      document_type: catalinalog
      multiline:
          pattern: '^[[:space:]]'
          negate: true
          match: after

  registry_file: /var/lib/filebeat/registry

##################################################### output #######################################################
output:
  logstash:
    hosts: ["192.168.40.83:5044"]

##################################################### Logging #######################################################
logging:
  files:
    rotateeverybytes: 10485760 # = 10MB
启动filebeat
service filebeat start

test2
安装filebeat
yum localinstall -y filebeat-1.2.3-x86_64.rpm
cp /etc/filebeat/filebeat.yml{,.bak_$(date +%F_%H:%M)}
# cat /etc/filebeat/filebeat.yml
##################################################### filebeat #######################################################
filebeat:
  prospectors:
    -
      paths:
        - /var/log/messages
      input_type: log
      document_type: messages

    -
      paths:
        - /var/log/secure
      input_type: log
      document_type: loginmsg

    -
      paths:
        - /var/log/nginx_access.log 
      input_type: log
      document_type: nginxacclog

    -
      paths:
        - /usr/local/tomcat/logs/catalina.out
      input_type: log
      document_type: catalinalog
      multiline:
          pattern: '^[[:space:]]'
          negate: true
          match: after

  registry_file: /var/lib/filebeat/registry

##################################################### output #######################################################
output:
  logstash:
    hosts: ["192.168.40.83:5044"]

##################################################### Logging #######################################################
logging:
  files:
    rotateeverybytes: 10485760 # = 10MB
启动filebeat
service filebeat start

 

查看Elasticsearch是否接收到了数据:

http://192.168.40.83:9200/_search?pretty

image

查看Elasticsearch中所有索引列表

http://192.168.40.83:9200/_aliases

image

访问kibana

http://192.168.40.83:5601/

image

参考链接:

logstash快速入门
http://www.2cto.com/os/201411/352015.html
使用 curl 命令发送请求来查看 ES 是否接收到了数据:
# curl 'http://localhost:9200/_search?pretty'
开源实时日志分析ELK平台部署
http://www.tuicool.com/articles/QFvARfr

ELK系列一:ELK安装配置及nginx日志分析
http://www.myhack58.com/Article/sort099/sort0102/2016/81990_3.htm
ELK系列二:kibana操作及nginx日志分析图表创建
http://www.myhack58.com/Article/sort099/sort0102/2016/81991.htm

ELK+Filebeat+Kafka+ZooKeeper 构建海量日志分析平台
http://tchuairen.blog.51cto.com/3848118/1861167

转载于:https://my.oschina.net/u/1762991/blog/887681

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值