项目介绍:日志收集项目
主要功能:收集日志并且可视化查询日志
开发环境:windows + idea
项目描述:收集日志并且在可视化页面中查询日志信息
主要技术:etcd+kafka+ElasticSearch+Kibana
负责模块:kafka收集日志,送到ElasticSearch中。kibana展示
初始化etcd连接,然后从etcd中拉取要收集日志的配置项,并由etcd派一个watch去监听配置变化;日志收集端把收集到的日志发送到kafka,本程序再把日志数据从kafka转发到ElasticSearch中,最后用kibana读取展示。
logtransfer.ini配置文件编辑如下
[kafka]
address=127.0.0.1:9092
topic=web_log
[es]
address=127.0.0.1:9200
index=web
max_chan_size=10000
goroutine_num=16
结构体声明models.go
package model
//model
// Config is the root configuration for the log transfer service,
// populated from logtransfer.ini via ini.MapTo. The embedded structs
// map to the [kafka] and [es] sections through their `ini` tags.
type Config struct {
	KafkaConf `ini:"kafka"`
	ESConf `ini:"es"`
}

// KafkaConf holds the Kafka consumer settings from the [kafka] section.
type KafkaConf struct {
	Address string `ini:"address"` // broker address, e.g. "127.0.0.1:9092"
	Topic string `ini:"topic"` // topic the log records are consumed from
}

// ESConf holds the Elasticsearch writer settings from the [es] section.
type ESConf struct {
	Address string `ini:"address"` // ES HTTP address, e.g. "127.0.0.1:9200"
	Index string `ini:"index"` // index that documents are written to
	MaxSize int `ini:"max_chan_size"` // buffer size of the log data channel
	GoNum int `ini:"goroutine_num"` // number of sender goroutines started by es.Init
}
main函数
package main
import (
"fmt"
"github.com/go-ini/ini"
"log_transfer/es"
"log_transfer/kafka"
"log_transfer/model"
)
// log transfer: consume log records from kafka and write them into ES.
//
// Order matters: ES must be initialized before Kafka so that messages
// arriving from Kafka always find a ready ES writer.
func main() {
	// 1. Load the ini configuration into the Config struct.
	var cfg = new(model.Config)
	err := ini.MapTo(cfg, "./config/logtransfer.ini")
	if err != nil {
		// Printf, not Println: the original passed a %v format string to
		// Println, which printed the verb literally instead of formatting err.
		fmt.Printf("load config failed,err:%v\n", err)
		panic(err)
	}
	fmt.Println("load config success")
	// 2. Connect to Elasticsearch and start the sender goroutines.
	err = es.Init(cfg.ESConf.Address, cfg.ESConf.Index, cfg.ESConf.GoNum, cfg.ESConf.MaxSize)
	if err != nil {
		fmt.Printf("init es failed,err:%v\n", err)
		panic(err)
	}
	fmt.Println("connect ES success")
	// 3. Connect to Kafka and start consuming every partition.
	err = kafka.Init([]string{cfg.KafkaConf.Address}, cfg.KafkaConf.Topic)
	if err != nil {
		// Same Println-with-format-verb bug fixed here.
		fmt.Printf("connect to kafka failed,err:%v\n", err)
		panic(err)
	}
	fmt.Println("connect kafka success")
	// Block forever; all work happens in background goroutines.
	select {}
}
注意:一定要先初始化es,以防kafka这边传来消息时es还没有初始化完成,造成消息堵塞。
es代码模块
package es
import (
"context"
"fmt"
"github.com/olivere/elastic"
)
// ESClient wraps an Elasticsearch client together with the target index
// and the buffered channel that decouples Kafka consumption from ES
// indexing: kafka.Init producers push records in, sendToES workers
// drain them out.
type ESClient struct {
	client *elastic.Client // underlying olivere/elastic client
	index string // ES index the documents are written to
	logDatachan chan interface{} // buffered queue of log records awaiting indexing
}

// esClient is the package-level singleton set up by Init; PutLogData
// and sendToES both rely on it.
var (
	esClient *ESClient
)
// Init connects to Elasticsearch at http://<address>, prepares the
// buffered log channel (capacity maxSize), and starts goroutineNum
// sender goroutines that drain the channel into the given index.
//
// Callers must invoke Init successfully before calling PutLogData.
func Init(address, index string, goroutineNum, maxSize int) (err error) {
	client, err := elastic.NewClient(elastic.SetURL("http://" + address))
	if err != nil {
		// Return the error instead of panicking: the signature already
		// reports failure and the caller decides how to react (the
		// original panicked, making the error return dead code).
		return err
	}
	fmt.Printf("%#v\n", client)
	esClient = &ESClient{
		client: client,
		index: index,
		logDatachan: make(chan interface{}, maxSize),
	}
	fmt.Println("connect to es success")
	// Start the workers that move records from the channel into ES
	// (the destination is ES, not kafka as the original comment said).
	for i := 0; i < goroutineNum; i++ {
		go sendToES()
	}
	return nil
}
// sendToES is the body of a worker goroutine: it drains
// esClient.logDatachan and indexes each record into Elasticsearch.
// It runs until the channel is closed.
func sendToES() {
	for record := range esClient.logDatachan {
		put1, err := esClient.client.Index().
			Index(esClient.index).
			BodyJson(record).
			Do(context.Background())
		if err != nil {
			// Log and keep consuming: a single failed document must not
			// panic and kill the whole transfer process (the original
			// panicked here, crashing every worker on one bad write).
			fmt.Printf("index log data failed,err:%v\n", err)
			continue
		}
		fmt.Printf("Indexed user %s to index %s,type %s\n", put1.Id, put1.Index, put1.Type)
	}
}
// PutLogData enqueues one log record for asynchronous indexing into
// Elasticsearch; it blocks when the buffered channel is full. Exported
// so the kafka package can hand decoded records over.
func PutLogData(msg interface{}) {
	queue := esClient.logDatachan
	queue <- msg
}
kafka代码模块
package kafka
import (
"encoding/json"
"fmt"
"github.com/Shopify/sarama"
"log_transfer/es"
)
// Init creates a Kafka consumer for the given broker addresses, then
// starts one goroutine per partition of topic. Each goroutine decodes
// every message as JSON and hands it to the es package for indexing.
func Init(addr []string, topic string) (err error) {
	// Create the consumer connected to the broker list.
	consumer, err := sarama.NewConsumer(addr, nil)
	if err != nil {
		fmt.Printf("fail to start consumer,err:%v\n", err)
		return
	}
	// Fetch all partition IDs for the topic.
	partitionList, err := consumer.Partitions(topic)
	if err != nil {
		fmt.Printf("fail to get list of partition:err%v\n", err)
		return
	}
	// Range over the partition IDs themselves. The original ranged over
	// the slice indices, which is only correct by coincidence when the
	// IDs happen to be 0..n-1.
	for _, partition := range partitionList {
		// Create one partition consumer per partition.
		var pc sarama.PartitionConsumer
		pc, err = consumer.ConsumePartition(topic, partition, sarama.OffsetNewest)
		if err != nil {
			fmt.Printf("failed to start consumer for partition %d,err:%v\n", partition, err)
			return
		}
		// Consume this partition asynchronously. The consumer is passed
		// as a parameter and actually used (the original captured the
		// loop's pc and ignored the parameter).
		go func(partitionConsumer sarama.PartitionConsumer) {
			for msg := range partitionConsumer.Messages() {
				fmt.Println(msg.Topic, string(msg.Value))
				var record map[string]interface{}
				// Use a goroutine-local error: the original assigned the
				// outer named return err from several goroutines at once,
				// which is a data race.
				if uerr := json.Unmarshal(msg.Value, &record); uerr != nil {
					fmt.Printf("unmarshal msg failed,err:%v\n", uerr)
					continue
				}
				// Push into the ES channel; the es workers index it.
				es.PutLogData(record)
			}
		}(pc)
	}
	return
}
依赖如下
module log_transfer
go 1.16
require (
	github.com/Shopify/sarama v1.35.0
	github.com/go-ini/ini v1.66.6
	github.com/mailru/easyjson v0.7.7 // indirect
	github.com/olivere/elastic v6.2.37+incompatible
)