centos7搭建ELK日志分析平台

一、安装jdk

# Java environment — Elasticsearch and Logstash both require a JVM (JDK 8 here).
# NOTE(review): put these lines in /etc/profile (or ~/.bashrc) so they persist;
# exporting them in an interactive shell only affects that session.
export JAVA_HOME=/usr/local/java/jdk1.8.0_91
export CLASSPATH=.:$JAVA_HOME/jre/lib/rt.jar:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=$PATH:$JAVA_HOME/bin

二、安装redis

# Download, build and start Redis — it acts as the broker/queue between
# the logstash agent (producer) and the logstash indexer (consumer).
wget http://download.redis.io/releases/redis-3.0.0.tar.gz
tar -zxvf redis-3.0.0.tar.gz
cd redis-3.0.0
# MALLOC=libc builds against the system allocator, avoiding the jemalloc dependency.
make MALLOC=libc
# Start redis in the background
src/redis-server &
# Test redis with the bundled command-line client
src/redis-cli

三、下载相应安装包

# Download matching ELK releases (Elasticsearch 2.4.0, Logstash 2.4.0, Kibana 4.6.1
# — these versions are compatible with each other).
wget https://download.elastic.co/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/2.4.0/elasticsearch-2.4.0.tar.gz
wget https://download.elastic.co/logstash/logstash/logstash-2.4.0.tar.gz
wget https://download.elastic.co/kibana/kibana/kibana-4.6.1-linux-x86_64.tar.gz

下载会耗费一定时间,请耐心等待;如果下载速度太慢,可以改用迅雷等下载工具。

# Create the install root first: `tar -C DIR` does not create DIR and
# fails with "Cannot chdir" when it is missing.
mkdir -p /usr/work/elk/soft
tar -zxvf logstash-2.4.0.tar.gz -C /usr/work/elk/soft/
tar -zxvf elasticsearch-2.4.0.tar.gz -C /usr/work/elk/soft/
tar -zxvf kibana-4.6.1-linux-x86_64.tar.gz -C /usr/work/elk/soft/

四、应用端配置logstash-agent

cd /usr/work/elk/soft/logstash-2.4.0
# -p: no error if the directory already exists
mkdir -p conf
# Create and edit the config INSIDE conf/ — the startup command later runs
# `./bin/logstash -f conf/logstash_agent.conf`, so the file must live there.
# (The original touched the file in the logstash root, where it would never
# be found by that command.)
touch conf/logstash_agent.conf
vi conf/logstash_agent.conf
# Fill the file with the following content
input {
        file {
                type => "customer_service"
                # Log file(s) to collect
                path => ["/usr/local/fwyun-shop/console.log"]
        }
}
output {
        # Push events onto a redis list; the indexer on the server side
        # pops them from the same list key ("logstash:redis").
        redis {
                host => "192.168.0.129"
                data_type => "list"
                key => "logstash:redis"
        }
}

保存后退出

# Start the agent in the background (output is appended to nohup.out)
nohup ./bin/logstash -f conf/logstash_agent.conf &

# Alternative: start without writing to nohup.out (discard all output)
# nohup ./bin/logstash -f conf/logstash_agent.conf >/dev/null 2>&1 &

五、服务端配置并启动elasticsearch

cd /usr/work/elk/soft/elasticsearch-2.4.0
vi config/elasticsearch.yml
# Uncomment the network.host entry and set it to 0.0.0.0. (The official docs do
# not say this is required and the default should work, but in testing
# elasticsearch was unreachable from other hosts without this change.)
# NOTE(review): 0.0.0.0 binds on all interfaces — make sure the node is
# firewalled, since ES 2.x has no built-in authentication.

# Optional: install the "head" cluster-management plugin
#bin/plugin install mobz/elasticsearch-head

启动elasticsearch

# Start elasticsearch in the background (output goes to nohup.out)
nohup ./bin/elasticsearch &
# Alternative: start without writing to nohup.out
#nohup ./bin/elasticsearch >/dev/null 2>&1 &

六、服务端配置logstash-indexer

cd logstash-2.4.0/
# -p: no error if the directory already exists
mkdir -p conf
# Create and edit the config INSIDE conf/ so the startup command
# `./bin/logstash -f conf/logstash-indexer.conf` can find it.
# (The original created the file in the logstash root instead.)
touch conf/logstash-indexer.conf
vi conf/logstash-indexer.conf
# Fill the file with the following content
input {
        # Pop events from the same redis list the agents push to.
        redis {
                host => "localhost" # redis address
                data_type => "list"
                key => "logstash:redis"
                type => "redis-input"
        }
}

filter {
  # Events whose path contains "access" are treated as Apache access logs
  # and parsed with the stock combined-log grok pattern.
  if [path] =~ "access" {
    mutate { replace => { "type" => "apache_access" } }
    grok {
      match => { "message" => "%{COMBINEDAPACHELOG}" }
    }
  }
  # Use the timestamp from the log line itself as the event's @timestamp.
  date {
    match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ]
  }
}

output {
  # Index events into the local elasticsearch node.
  elasticsearch {
    hosts => ["localhost:9200"]
  }
  # Also print each event to stdout for debugging.
  stdout { codec => rubydebug }
}

保存后退出

启动logstash-indexer

# Start the indexer in the background (output is appended to nohup.out)
nohup ./bin/logstash -f conf/logstash-indexer.conf &

# Alternative: start without writing to nohup.out
#nohup ./bin/logstash -f conf/logstash-indexer.conf >/dev/null 2>&1 &

七、服务端安装kibana

cd kibana-4.6.1-linux-x86_64
# Start kibana in the background (listens on port 5601 by default)
nohup ./bin/kibana & 
# Alternative: start without writing to nohup.out
#nohup ./bin/kibana >/dev/null 2>&1 &

http://your-kibana-ip:5601/ 

八、问题解决

ERROR: [2] bootstrap checks failed
[1]: max file descriptors [4096] for elasticsearch process is too low, increase to at least [65536]

# Raise the open-file limit for the elasticsearch user (bootstrap check [1]).
# Back up first, then rewrite limits.conf with the new nofile entries.
[root@localhost ~]# cp /etc/security/limits.conf /etc/security/limits.conf.bak
[root@localhost ~]# cat /etc/security/limits.conf | grep -v "seven" > /tmp/system_limits.conf
[root@localhost ~]# echo "es hard nofile 65536" >> /tmp/system_limits.conf 
[root@localhost ~]# echo "es soft nofile 65536" >> /tmp/system_limits.conf 
[root@localhost ~]# mv /tmp/system_limits.conf /etc/security/limits.conf
# NOTE(review): the grep strips existing "seven" entries while the new limits
# are added for user "es", yet the check below runs as "seven" — presumably
# the entries should target whichever user actually runs elasticsearch; verify.
# After the change, log in again as the es user and check that it took effect:

[seven@localhost ~]$ ulimit -Hn
65536


[2]: max virtual memory areas vm.max_map_count [65530] is too low, increase to at least [262144]

# Raise vm.max_map_count persistently (bootstrap check [2]): drop any existing
# setting from sysctl.conf, append the new value, then reload with sysctl -p.
[root@localhost ~]# cat /etc/sysctl.conf | grep -v "vm.max_map_count" > /tmp/system_sysctl.conf
[root@localhost ~]# echo "vm.max_map_count=262144" >> /tmp/system_sysctl.conf
[root@localhost ~]# mv /tmp/system_sysctl.conf /etc/sysctl.conf
mv:是否覆盖"/etc/sysctl.conf"? y
[root@localhost ~]# sysctl -p
vm.max_map_count = 262144

 

转载于:https://my.oschina.net/fuxingCoder/blog/744426

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值