1
1
package io .lumigo .models ;
2
2
3
- import static io .lumigo .core .SpansContainer .KAFKA_SPAN_TYPE ;
4
-
5
3
import com .fasterxml .jackson .annotation .JsonProperty ;
6
4
import java .util .*;
7
- import java .util .stream .Collectors ;
8
5
import lombok .AllArgsConstructor ;
9
6
import lombok .Builder ;
10
- import lombok .Data ;
11
- import org .apache .kafka .clients .consumer .ConsumerRecords ;
12
- import org .apache .kafka .clients .consumer .KafkaConsumer ;
13
- import org .apache .kafka .clients .consumer .internals .ConsumerMetadata ;
14
- import org .apache .kafka .clients .producer .ProducerRecord ;
15
- import org .apache .kafka .clients .producer .RecordMetadata ;
16
- import org .apache .kafka .clients .producer .internals .ProducerMetadata ;
17
- import org .apache .kafka .common .header .Header ;
18
- import org .apache .kafka .common .header .Headers ;
19
- import org .apache .kafka .common .serialization .Serializer ;
20
7
21
- @ Data
22
8
@ Builder (toBuilder = true )
23
- @ AllArgsConstructor
24
9
public class KafkaSpan {
25
10
private static final String KAFKA_PRODUCER_TYPE = "PRODUCER" ;
26
11
private static final String KAFKA_CONSUMER_TYPE = "CONSUMER" ;
@@ -37,7 +22,6 @@ public class KafkaSpan {
37
22
private Info info ;
38
23
39
24
@ Builder (toBuilder = true )
40
- @ Data (staticConstructor = "of" )
41
25
public static class Info {
42
26
private KafkaSpan .Tracer tracer ;
43
27
private KafkaSpan .TraceId traceId ;
@@ -50,35 +34,29 @@ public static class Info {
50
34
51
35
    /** Trace identifier copied from the parent invocation span. */
    @AllArgsConstructor
    @Builder(toBuilder = true)
    public static class TraceId {
        // Serialized under the JSON name "Root".
        @JsonProperty("Root")
        private String root;
    }
58
41
59
42
    /** Identifies the version of the Lumigo tracer that produced this span. */
    @AllArgsConstructor
    @Builder(toBuilder = true)
    public static class Tracer {
        private String version;
    }
65
47
66
48
public interface KafkaInfo {}
67
49
68
    /** Producer-side details of a Kafka span: target topic, sent record, and broker response. */
    @Builder(toBuilder = true)
    public static class KafkaProducerInfo implements KafkaInfo {
        // Discriminator for this payload type. NOTE(review): as a static field with no
        // getter it may be skipped by the serializer — confirm the type marker still
        // appears in the reported JSON.
        private static final String kafkaInfoType = KAFKA_PRODUCER_TYPE;
        private List<String> bootstrapServers;
        private String topic;
        private KafkaSpan.KafkaProducerRecord record;
        private KafkaSpan.KafkaProducerResponse response;
    }
78
58
79
- @ AllArgsConstructor
80
59
@ Builder (toBuilder = true )
81
- @ Data (staticConstructor = "of" )
82
60
public static class KafkaProducerRecord {
83
61
private byte [] key ;
84
62
private byte [] value ;
@@ -87,36 +65,28 @@ public static class KafkaProducerRecord {
87
65
88
66
public interface KafkaProducerResponse {}
89
67
90
    /** Broker acknowledgement details for a successful produce call. */
    @Builder(toBuilder = true)
    public static class KafkaProducerSuccessResponse implements KafkaProducerResponse {
        private Integer partition;
        private Long offset;
    }
97
73
98
    /** Failure details for a produce call that completed exceptionally. */
    @Builder(toBuilder = true)
    public static class KafkaProducerErrorResponse implements KafkaProducerResponse {
        private String errorMessage;
    }
104
78
105
    /** Consumer-side details of a Kafka span: the polled batch and where it came from. */
    @Builder(toBuilder = true)
    public static class KafkaConsumerInfo implements KafkaInfo {
        // Discriminator for this payload type. NOTE(review): as a static field with no
        // getter it may be skipped by the serializer — confirm it reaches the backend.
        private static final String kafkaInfoType = KAFKA_CONSUMER_TYPE;
        private List<String> bootstrapServers;
        private String consumerGroupId;
        private Integer recordsCount;
        private List<String> topics;
        private List<KafkaSpan.KafkaConsumerRecord> records;
    }
116
88
117
- @ AllArgsConstructor
118
89
@ Builder (toBuilder = true )
119
- @ Data (staticConstructor = "of" )
120
90
public static class KafkaConsumerRecord {
121
91
private String topic ;
122
92
private Integer partition ;
@@ -125,152 +95,4 @@ public static class KafkaConsumerRecord {
125
95
private String value ;
126
96
private Map <String , byte []> headers ;
127
97
}
128
-
129
- public static <K , V > KafkaSpan createProduce (
130
- Span baseSpan ,
131
- Long startTime ,
132
- Serializer <K > keySerializer ,
133
- Serializer <V > valueSerializer ,
134
- ProducerMetadata producerMetadata ,
135
- ProducerRecord <K , V > record ,
136
- RecordMetadata recordMetadata ,
137
- Exception exception ) {
138
- List <String > bootstrapServers =
139
- producerMetadata .fetch ().nodes ().stream ()
140
- .map (node -> node .host () + ":" + node .port ())
141
- .collect (Collectors .toList ());
142
- String topic = record .topic ();
143
- KafkaProducerRecord producerRecord =
144
- KafkaProducerRecord .builder ()
145
- .key (
146
- keySerializer .serialize (
147
- record .topic (), record .headers (), record .key ()))
148
- .value (
149
- valueSerializer .serialize (
150
- record .topic (), record .headers (), record .value ()))
151
- .headers (extractHeaders (record .headers ()))
152
- .build ();
153
-
154
- KafkaInfo info ;
155
- if (exception == null ) {
156
- info =
157
- KafkaSpan .KafkaProducerInfo .builder ()
158
- .bootstrapServers (bootstrapServers )
159
- .topic (topic )
160
- .record (producerRecord )
161
- .response (
162
- KafkaProducerSuccessResponse .builder ()
163
- .partition (recordMetadata .partition ())
164
- .offset (recordMetadata .offset ())
165
- .build ())
166
- .build ();
167
- } else {
168
- info =
169
- KafkaProducerInfo .builder ()
170
- .bootstrapServers (bootstrapServers )
171
- .topic (topic )
172
- .record (producerRecord )
173
- .response (
174
- KafkaProducerErrorResponse .builder ()
175
- .errorMessage (exception .getMessage ())
176
- .build ())
177
- .build ();
178
- }
179
-
180
- return new KafkaSpanBuilder ()
181
- .id (UUID .randomUUID ().toString ())
182
- .started (startTime )
183
- .ended (System .currentTimeMillis ())
184
- .type (KAFKA_SPAN_TYPE )
185
- .transactionId (baseSpan .getTransactionId ())
186
- .account (baseSpan .getAccount ())
187
- .region (baseSpan .getRegion ())
188
- .token (baseSpan .getToken ())
189
- .parentId (baseSpan .getId ())
190
- .info (
191
- KafkaSpan .Info .builder ()
192
- .tracer (
193
- KafkaSpan .Tracer .builder ()
194
- .version (
195
- baseSpan .getInfo ().getTracer ().getVersion ())
196
- .build ())
197
- .traceId (
198
- KafkaSpan .TraceId .builder ()
199
- .root (baseSpan .getInfo ().getTraceId ().getRoot ())
200
- .build ())
201
- .messageId (
202
- new String (
203
- record .headers ()
204
- .lastHeader ("lumigoMessageId" )
205
- .value ()))
206
- .kafkaInfo (info )
207
- .build ())
208
- .build ();
209
- }
210
-
211
- public static KafkaSpan createConsume (
212
- Span baseSpan ,
213
- Long startTime ,
214
- KafkaConsumer <?, ?> consumer ,
215
- ConsumerMetadata consumerMetadata ,
216
- ConsumerRecords <?, ?> consumerRecords ) {
217
- List <String > messageIds = new ArrayList <>();
218
- List <String > bootstrapServers =
219
- consumerMetadata .fetch ().nodes ().stream ()
220
- .map (node -> node .host () + ":" + node .port ())
221
- .collect (Collectors .toList ());
222
- List <String > topics = new ArrayList <>(consumer .subscription ());
223
- List <KafkaConsumerRecord > records = new ArrayList <>();
224
- consumerRecords .forEach (
225
- record -> {
226
- messageIds .add (
227
- new String (record .headers ().lastHeader ("lumigoMessageId" ).value ()));
228
- records .add (
229
- KafkaConsumerRecord .builder ()
230
- .topic (record .topic ())
231
- .partition (record .partition ())
232
- .offset (record .offset ())
233
- .key (record .key ().toString ())
234
- .value (record .value ().toString ())
235
- .headers (extractHeaders (record .headers ()))
236
- .build ());
237
- });
238
- return KafkaSpan .builder ()
239
- .id (UUID .randomUUID ().toString ())
240
- .started (startTime )
241
- .ended (System .currentTimeMillis ())
242
- .type (KAFKA_SPAN_TYPE )
243
- .transactionId (baseSpan .getTransactionId ())
244
- .account (baseSpan .getAccount ())
245
- .region (baseSpan .getRegion ())
246
- .token (baseSpan .getToken ())
247
- .parentId (baseSpan .getId ())
248
- .info (
249
- Info .builder ()
250
- .tracer (
251
- KafkaSpan .Tracer .builder ()
252
- .version (
253
- baseSpan .getInfo ().getTracer ().getVersion ())
254
- .build ())
255
- .traceId (
256
- KafkaSpan .TraceId .builder ()
257
- .root (baseSpan .getInfo ().getTraceId ().getRoot ())
258
- .build ())
259
- .messageIds (messageIds )
260
- .kafkaInfo (
261
- KafkaSpan .KafkaConsumerInfo .builder ()
262
- .bootstrapServers (bootstrapServers )
263
- .consumerGroupId (consumer .groupMetadata ().groupId ())
264
- .topics (topics )
265
- .recordsCount (consumerRecords .count ())
266
- .records (records )
267
- .build ())
268
- .build ())
269
- .build ();
270
- }
271
-
272
- private static Map <String , byte []> extractHeaders (Headers headers ) {
273
- return Arrays .stream (headers .toArray ())
274
- .collect (Collectors .toMap (Header ::key , Header ::value ));
275
- }
276
98
}
0 commit comments