In the previous part we successfully initialized logTransfer's configuration. Next we will consume data from Kafka, store the logs in Elasticsearch, and display them with Kibana.
Saving Logs to ES
Initializing Es
Add a call to InitEs in the main function:

```go
	// initialize the Elasticsearch client
	err = es.InitEs(logConfig.EsAddr)
	if err != nil {
		logs.Error("初始化Elasticsearch失败, err:", err)
		return
	}
	logs.Debug("初始化Es成功")
```
Initialize Es in elasticsearch.go:

```go
package es
import (
	"fmt"
	"github.com/olivere/elastic/v7"
)
// Tweet is the sample document type from the olivere/elastic examples;
// it is superseded by LogMessage in the final es.go below
type Tweet struct {
	User    string
	Message string
}
var (
	esClient *elastic.Client
)
func InitEs(addr string) (err error) {
	// SetSniff(false) keeps the client pointed at addr instead of sniffing cluster nodes
	client, err := elastic.NewClient(elastic.SetSniff(false), elastic.SetURL(addr))
	if err != nil {
		fmt.Println("connect es error", err)
		return err // propagate the error instead of returning nil
	}
	esClient = client
	return
}
```
Note: the Es address must be filled in as http://localhost:9200/
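Before wiring the address into the pipeline, it can help to confirm the node is reachable. Below is a minimal, self-contained sketch using the same olivere/elastic client; the standalone main and the hard-coded address are illustrative assumptions, not part of logTransfer:

```go
package main

import (
	"context"
	"fmt"

	"github.com/olivere/elastic/v7"
)

func main() {
	addr := "http://localhost:9200/"
	// same options as InitEs: no sniffing, explicit URL
	client, err := elastic.NewClient(elastic.SetSniff(false), elastic.SetURL(addr))
	if err != nil {
		fmt.Println("connect es error:", err)
		return
	}
	// Ping returns the node's version info when the address is correct
	info, code, err := client.Ping(addr).Do(context.Background())
	if err != nil {
		fmt.Println("ping es error:", err)
		return
	}
	fmt.Printf("Elasticsearch returned code %d, version %s\n", code, info.Version.Number)
}
```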
Run the main function:

```
2020/03/28 18:01:53.360 [D]  初始化日志模块成功
2020/03/28 18:01:53.374 [D]  初始化Kafka成功
2020/03/28 18:01:53.381 [D]  初始化Es成功
```
Writing the run and SendToES Functions
Add a call to run in the main function; it drives the Kafka-to-Es consumption:

```go
	// run the consumer loop
	err = run()
	if err != nil {
		logs.Error("运行错误, err:", err)
		return
	}
```
run.go:

```go
package main
import (
	"github.com/Shopify/sarama"
	"github.com/astaxie/beego/logs"

	"logTransfer/es" // adjust to your module's import path for the es package
)
func run() (err error) {
	// fetch all partitions of the topic so every partition gets its own consumer
	partitionList, err := kafkaClient.Client.Partitions(kafkaClient.Topic)
	if err != nil {
		logs.Error("Failed to get the list of partitions: ", err)
		return
	}
	for _, partition := range partitionList {
		pc, errRet := kafkaClient.Client.ConsumePartition(kafkaClient.Topic, partition, sarama.OffsetNewest)
		if errRet != nil {
			err = errRet
			logs.Error("Failed to start consumer for partition %d: %s\n", partition, err)
			return
		}
		defer pc.AsyncClose()
		// Add must happen before the goroutine starts, or Wait may return too early
		kafkaClient.wg.Add(1)
		go func(pc sarama.PartitionConsumer) {
			defer kafkaClient.wg.Done()
			for msg := range pc.Messages() {
				logs.Debug("Partition:%d, Offset:%d, Key:%s, Value:%s", msg.Partition, msg.Offset, string(msg.Key), string(msg.Value))
				// use a local err so concurrent goroutines don't race on the named return
				if err := es.SendToES(kafkaClient.Topic, msg.Value); err != nil {
					logs.Warn("send to es failed, err:%v", err)
				}
			}
		}(pc)
	}
	kafkaClient.wg.Wait()
	return
}
```
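For reference, run() assumes a kafkaClient that was set up in the earlier Kafka chapter. Below is a minimal sketch of the shape it must have, inferred from the fields used above; the actual definition lives in the InitKafka code and may differ:

```go
package main

import (
	"sync"

	"github.com/Shopify/sarama"
)

// KafkaClient sketches the consumer state run() relies on.
type KafkaClient struct {
	Client sarama.Consumer // created by sarama.NewConsumer during InitKafka
	Topic  string          // topic name taken from the logTransfer config
	wg     sync.WaitGroup  // tracks one consumer goroutine per partition
}

// populated by the InitKafka step before run() is called
var kafkaClient *KafkaClient
```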
es.go (the complete file; it supersedes elasticsearch.go above, replacing the example Tweet type with LogMessage):

```go
package es
import (
	"context"
	"fmt"
	"github.com/olivere/elastic/v7"
)
type LogMessage struct {
	App     string
	Topic   string
	Message string
}
var (
	esClient *elastic.Client
)
func InitEs(addr string) (err error) {
	// SetSniff(false) keeps the client pointed at addr instead of sniffing cluster nodes
	client, err := elastic.NewClient(elastic.SetSniff(false), elastic.SetURL(addr))
	if err != nil {
		fmt.Println("connect es error", err)
		return err // propagate the error instead of returning nil
	}
	esClient = client
	return
}
// SendToES indexes one log message into an index named after the Kafka topic
func SendToES(topic string, data []byte) (err error) {
	msg := &LogMessage{}
	msg.Topic = topic
	msg.Message = string(data)
	_, err = esClient.Index().
		Index(topic).
		BodyJson(msg).
		Do(context.Background())
	if err != nil {
		// return the error instead of panicking, so run() can log a warning and continue
		return err
	}
	return
}
```
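SendToES makes one HTTP request per log line, which is easy to follow but caps throughput. If that becomes a bottleneck, olivere/elastic also provides a bulk API. A hedged sketch of a batched variant follows; SendBatchToES is a hypothetical helper, not part of the project:

```go
package es

import (
	"context"

	"github.com/olivere/elastic/v7"
)

// SendBatchToES indexes a whole batch of messages in a single Bulk request.
func SendBatchToES(topic string, batch [][]byte) error {
	bulk := esClient.Bulk()
	for _, data := range batch {
		msg := &LogMessage{Topic: topic, Message: string(data)}
		bulk.Add(elastic.NewBulkIndexRequest().Index(topic).Doc(msg))
	}
	// one round trip for the whole batch instead of one per message
	_, err := bulk.Do(context.Background())
	return err
}
```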
Build and Test
Run logAgent; it reads the log configuration from etcd and produces the logs to Kafka:

```
2020/03/28 20:25:17.581 [D] 导入日志成功&{debug E:\Go\logCollect\logAgent\logs\my.log 100 0.0.0.0:9092 [] 0.0.0.0:2379 /backend/logagent/config/}
```
Start logTransfer; it consumes the log messages and writes them into Es:

```
2020/03/28 20:29:17.109 [D]  初始化日志模块成功
2020/03/28 20:29:17.118 [D]  初始化Kafka成功
2020/03/28 20:29:17.120 [D]  初始化Es成功
2020/03/28 20:29:25.518 [D]  Partition:0, Offset:153210, Key:, Value:2020/03/28 20:29:25.265 [D]  开始监控key: /backend/logagent/config/192.168.0.1
2020/03/28 20:29:27.361 [D]  Partition:0, Offset:153211, Key:, Value:2020/03/28 20:29:25.265 [D]  开始监控key: /backend/logagent/config/169.254.30.148
2020/03/28 20:29:27.531 [D]  Partition:0, Offset:153212, Key:, Value:2020/03/28 20:29:25.265 [D]  开始监控key: /backend/logagent/config/169.254.153.68
2020/03/28 20:29:27.743 [D]  Partition:0, Offset:153213, Key:, Value:2020/03/28 20:29:25.266 [D]  开始监控key: /backend/logagent/config/192.168.0.11
2020/03/28 20:29:27.870 [D]  Partition:0, Offset:153214, Key:, Value:2020/03/28 20:29:25.267 [D]  初始化Kafka producer成功,地址为: 0.0.0.0:9092
2020/03/28 20:29:28.012 [D]  Partition:0, Offset:153215, Key:, Value:2020/03/28 20:29:25.267 [D]  初始化Kafka成功!
2020/03/28 20:29:28.191 [D]  Partition:0, Offset:153216, Key:, Value:2020/03/28 20:29:25.518 [D]  read success, pid:0, offset:153210, topic:nginx_log
2020/03/28 20:29:28.311 [D]  Partition:0, Offset:153217, Key:, Value:
2020/03/28 20:29:28.456 [D]  Partition:0, Offset:153218, Key:, Value:2020/03/28 20:29:25.521 [D]  read success, pid:0, offset:153211, topic:nginx_log
2020/03/28 20:29:28.600 [D]  Partition:0, Offset:153219, Key:, Value:
2020/03/28 20:29:28.744 [D]  Partition:0, Offset:153220, Key:, Value:2020/03/28 20:29:25.522 [D]  read success, pid:0, offset:153212, topic:nginx_log
```
Open Kibana at http://localhost:5601/.
Add the topic we just wrote as an index pattern, then open Discover to view it.
The latest data is already visible. Collection succeeded!
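To double-check from code rather than the Kibana UI, a count query against the index works as well. A minimal sketch follows; the index name nginx_log comes from the test run above, and the standalone main is an illustrative assumption:

```go
package main

import (
	"context"
	"fmt"

	"github.com/olivere/elastic/v7"
)

func main() {
	client, err := elastic.NewClient(elastic.SetSniff(false), elastic.SetURL("http://localhost:9200/"))
	if err != nil {
		fmt.Println("connect es error:", err)
		return
	}
	// count the documents logTransfer has indexed for the topic
	count, err := client.Count("nginx_log").Do(context.Background())
	if err != nil {
		fmt.Println("count error:", err)
		return
	}
	fmt.Printf("nginx_log contains %d documents\n", count)
}
```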