forked from checkr/openmock
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathkafka.go
150 lines (136 loc) · 3.37 KB
/
kafka.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
package openmock
import (
"github.com/Shopify/sarama"
cluster "github.com/bsm/sarama-cluster"
"github.com/sirupsen/logrus"
)
// KafkaPipelineFunc transforms a Kafka message payload as it passes
// through the mock — for example, to decode consumed messages or
// encode published ones. It receives the current Context plus the raw
// bytes and returns the transformed bytes, or an error to abort the
// publish/consume of that message.
type KafkaPipelineFunc func(c Context, in []byte) (out []byte, err error)
// DefaultPipelineFunc is the identity pipeline: it returns the input
// bytes unchanged and never fails. It is used for both the consume and
// publish pipelines when no custom function is configured.
var DefaultPipelineFunc = func(c Context, in []byte) (out []byte, err error) {
	return in, nil
}
// kafkaClient bundles everything OpenMock needs to talk to Kafka: a
// synchronous producer for publishing, one cluster consumer per mocked
// topic, and the configurable consume/publish pipeline functions.
type kafkaClient struct {
	clientID string   // consumer group / client ID passed to cluster.NewConsumer
	brokers  []string // seed broker addresses
	producer sarama.SyncProducer
	consumers []*cluster.Consumer // one per mocked topic; closed by close()
	cFunc KafkaPipelineFunc // applied to each consumed message before actions run
	pFunc KafkaPipelineFunc // applied to each payload before publishing
}
// sendMessage publishes bytes to the given Kafka topic, first running
// the payload through the client's publish pipeline function (pFunc).
// Whatever happens, the attempt — including the transformed payload
// and any error — is logged once the call completes.
func (kc *kafkaClient) sendMessage(topic string, bytes []byte) (err error) {
	var out []byte
	// Deferred so the log line captures the final values of err and out.
	defer func() {
		logrus.WithFields(logrus.Fields{
			"err":         err,
			"topic":       topic,
			"payload":     string(bytes),
			"out_message": string(out),
		}).Info("try to publish to kafka")
	}()

	ctx := Context{
		KafkaTopic:   topic,
		KafkaPayload: string(bytes),
	}
	if out, err = kc.pFunc(ctx, bytes); err != nil {
		return err
	}

	_, _, err = kc.producer.SendMessage(&sarama.ProducerMessage{
		Topic: topic,
		Value: sarama.ByteEncoder(out),
	})
	return err
}
// close shuts down every consumer and the producer, releasing all
// Kafka connections held by the client. It attempts to close every
// resource even when an earlier Close fails — the previous fail-fast
// behavior leaked the remaining consumers and the producer on the
// first error — and returns the first error encountered, if any.
// Safe to call on a nil receiver.
func (kc *kafkaClient) close() error {
	if kc == nil {
		return nil
	}
	var firstErr error
	for _, consumer := range kc.consumers {
		// Keep going on error so one bad consumer doesn't leak the rest.
		if err := consumer.Close(); err != nil && firstErr == nil {
			firstErr = err
		}
	}
	if err := kc.producer.Close(); err != nil && firstErr == nil {
		firstErr = err
	}
	return firstErr
}
// configKafka builds the kafkaClient used by om: a synchronous Sarama
// producer plus the consume/publish pipeline functions, falling back
// to DefaultPipelineFunc for any pipeline that is not configured.
// Returns an error if the producer cannot be created.
func (om *OpenMock) configKafka() error {
	producer, err := sarama.NewSyncProducer(om.KafkaSeedBrokers, nil)
	if err != nil {
		return err
	}

	// Resolve the pipeline functions up front; both default to identity.
	consumeFunc := KafkaPipelineFunc(DefaultPipelineFunc)
	publishFunc := KafkaPipelineFunc(DefaultPipelineFunc)
	if om.KafkaConsumePipelineFunc != nil {
		consumeFunc = om.KafkaConsumePipelineFunc
	}
	if om.KafkaPublishPipelineFunc != nil {
		publishFunc = om.KafkaPublishPipelineFunc
	}

	om.kafkaClient = &kafkaClient{
		clientID: om.KafkaClientID,
		brokers:  om.KafkaSeedBrokers,
		producer: producer,
		cFunc:    consumeFunc,
		pFunc:    publishFunc,
	}
	return nil
}
// startKafka configures the Kafka client and starts one consumer
// goroutine per mocked topic.
//
// Consumers are created sequentially here, before any goroutine is
// spawned: the previous version appended to om.kafkaClient.consumers
// from inside concurrent goroutines, which is a data race on the
// slice. Creating them in the main goroutine also means a creation
// failure is reported (Fatal) before any consuming begins, matching
// the configKafka failure handling.
func (om *OpenMock) startKafka() {
	if err := om.configKafka(); err != nil {
		logrus.WithFields(logrus.Fields{
			"err": err,
		}).Fatal("failed to config kafka")
		return
	}
	for kafka, ms := range om.repo.KafkaMocks {
		consumer, err := cluster.NewConsumer(
			om.kafkaClient.brokers,
			om.kafkaClient.clientID,
			[]string{kafka.Topic},
			nil,
		)
		if err != nil {
			logrus.WithFields(logrus.Fields{
				"err":   err,
				"topic": kafka.Topic,
			}).Fatal("failed to create a consumer")
			return
		}
		logrus.Infof("consumer started for topic:%s", kafka.Topic)
		om.kafkaClient.consumers = append(om.kafkaClient.consumers, consumer)

		go func(kafka ExpectKafka, ms MocksArray, consumer *cluster.Consumer) {
			// Ranging over the channel exits cleanly when the consumer
			// is closed; the old single-case select busy-spun on the
			// closed channel forever.
			for msg := range consumer.Messages() {
				c := Context{
					KafkaTopic:   msg.Topic,
					KafkaPayload: string(msg.Value),
					om:           om,
				}
				payload, err := om.kafkaClient.cFunc(c, msg.Value)
				if err != nil {
					logrus.WithFields(logrus.Fields{
						"err":   err,
						"topic": kafka.Topic,
					}).Errorf("failed to decode msg when consume the message")
					// Skip the bad message but keep consuming; the old
					// `return` here permanently stopped consumption of
					// the topic on the first decode error.
					continue
				}
				c.KafkaPayload = string(payload)
				newOmLogger(c).WithFields(logrus.Fields{
					"topic":   msg.Topic,
					"payload": c.KafkaPayload,
				}).Info("start_consuming_message")
				ms.DoActions(c)
				consumer.MarkOffset(msg, "")
			}
		}(kafka, ms, consumer)
	}
}