1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
|
package reporter
import (
"github.com/Shopify/sarama"
"github.com/sirupsen/logrus"
"../error_const"
)
// KafkaCfg describes the Kafka server-side configuration.
type KafkaCfg struct {
	// Host is the broker address; setProducer uses it as the sole broker.
	Host string `json:"host"`
	// Topic is presumably a default topic to publish to — DoReport takes
	// an explicit topic, so NOTE(review): verify against callers whether
	// this field is actually consumed.
	Topic string `json:"topic"`
}
// Reporter publishes messages to Kafka through a synchronous producer.
type Reporter struct {
	// Producer is the sarama synchronous producer. It may be nil when
	// initialization failed (see setProducer), in which case sends panic.
	Producer sarama.SyncProducer
	// logger receives error and info lines about producer setup and sends.
	logger *logrus.Logger
}
// 构造函数
// NewReporter builds a Reporter that logs through log and initializes a
// synchronous Kafka producer for the broker described by cfg.
func NewReporter(cfg *KafkaCfg, log *logrus.Logger) *Reporter {
	r := &Reporter{logger: log}
	r.setProducer(cfg)
	return r
}
// setProducer creates a sarama synchronous producer for the broker in cfg
// and stores it on the Reporter. On failure the error is logged and
// Producer is left nil; callers must not send in that state.
// NOTE(review): consider propagating the error instead of logging here —
// confirm that callers of NewReporter can handle a constructor error.
func (reporter *Reporter) setProducer(cfg *KafkaCfg) {
	brokers := []string{cfg.Host}

	config := sarama.NewConfig()
	// Required for SyncProducer per sarama docs: success acks must be
	// returned so SendMessage can block on the result.
	config.Producer.Return.Successes = true

	producer, err := sarama.NewSyncProducer(brokers, config)
	if err != nil {
		reporter.logger.Errorf(error_const.InitProducerError, err)
		// Bug fix: previously fell through and assigned the nil producer,
		// leaving a Reporter that panics on first send without any guard.
		return
	}
	reporter.Producer = producer
}
// DoReport publishes msg to the given Kafka topic via the underlying
// synchronous producer. Errors are logged, not returned.
func (r *Reporter) DoReport(topic string, msg []byte) {
	r.do(topic, msg)
}
// do wraps msg in a producer message and sends it synchronously, logging
// the outcome: an error line on failure, an info line on success.
func (reporter *Reporter) do(topic string, msg []byte) {
	kafkaMsg := generateProducerMessage(topic, msg)
	_, _, err := reporter.Producer.SendMessage(kafkaMsg)
	if err != nil {
		reporter.logger.Errorf(error_const.ReportKafkaMsgError, err, string(msg))
		// Bug fix: without this return the success line below was logged
		// even for failed sends.
		return
	}
	reporter.logger.Infof(error_const.ReportKafkaMsgSuccess, string(msg))
}
// generateProducerMessage wraps a raw payload in a sarama message for the
// given topic. ByteEncoder yields the same bytes on the wire as the
// previous StringEncoder but avoids the []byte -> string copy.
// Partition -1 is kept as before; the default partitioner chooses the
// partition itself — TODO confirm no manual partitioner is configured.
func generateProducerMessage(topic string, message []byte) *sarama.ProducerMessage {
	return &sarama.ProducerMessage{
		Topic:     topic,
		Partition: -1,
		Value:     sarama.ByteEncoder(message),
	}
}
|