Commit 1df638b: unit tests

1 parent: 1b1bba6
6 files changed: +468 -191 lines

src/main/java/io/lumigo/core/SpansContainer.java (+3 -2)

@@ -13,6 +13,7 @@
 import io.lumigo.core.utils.StringUtils;
 import io.lumigo.models.HttpSpan;
 import io.lumigo.models.KafkaSpan;
+import io.lumigo.models.KafkaSpanFactory;
 import io.lumigo.models.Span;
 import java.io.*;
 import java.util.*;
@@ -455,7 +456,7 @@ public <K, V> void addKafkaProduceSpan(
             RecordMetadata recordMetadata,
             Exception exception) {
         this.kafkaSpans.add(
-                KafkaSpan.createProduce(
+                KafkaSpanFactory.createProduce(
                         this.baseSpan,
                         startTime,
                         keySerializer,
@@ -472,7 +473,7 @@ public void addKafkaConsumeSpan(
             ConsumerMetadata consumerMetadata,
             ConsumerRecords<?, ?> consumerRecords) {
         this.kafkaSpans.add(
-                KafkaSpan.createConsume(
+                KafkaSpanFactory.createConsume(
                         this.baseSpan, startTime, consumer, consumerMetadata, consumerRecords));
     }
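The KafkaSpanFactory file these call sites now target is part of this commit but is not expanded in this view. Judging from the call sites above and the code removed from KafkaSpan.java in the last diff below, its public surface is presumably along these lines (a sketch of the expected shape, not the committed file):

package io.lumigo.models;

import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.internals.ConsumerMetadata;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.clients.producer.internals.ProducerMetadata;
import org.apache.kafka.common.serialization.Serializer;

// Sketch: the createProduce/createConsume logic removed from KafkaSpan.java
// (see the last diff below), relocated to a dedicated factory so the model
// class stays a plain data holder.
public class KafkaSpanFactory {

    public static <K, V> KafkaSpan createProduce(
            Span baseSpan,
            Long startTime,
            Serializer<K> keySerializer,
            Serializer<V> valueSerializer,
            ProducerMetadata producerMetadata,
            ProducerRecord<K, V> record,
            RecordMetadata recordMetadata,
            Exception exception) {
        // Body: the producer-span builder code removed from KafkaSpan below.
        throw new UnsupportedOperationException("sketch only");
    }

    public static KafkaSpan createConsume(
            Span baseSpan,
            Long startTime,
            KafkaConsumer<?, ?> consumer,
            ConsumerMetadata consumerMetadata,
            ConsumerRecords<?, ?> consumerRecords) {
        // Body: the consumer-span builder code removed from KafkaSpan below.
        throw new UnsupportedOperationException("sketch only");
    }
}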

src/main/java/io/lumigo/core/instrumentation/impl/ApacheKafkaConsumerInstrumentation.java (+5 -11)

@@ -21,13 +21,11 @@ public class ApacheKafkaConsumerInstrumentation implements LumigoInstrumentation

     @Override
     public ElementMatcher<TypeDescription> getTypeMatcher() {
-        System.out.println("Inside ApacheKafkaConsumerInstrumentation.getTypeMatcher()");
         return named("org.apache.kafka.clients.consumer.KafkaConsumer");
     }

     @Override
     public AgentBuilder.Transformer.ForAdvice getTransformer() {
-        System.out.println("Inside ApacheKafkaConsumerInstrumentation.getTransformer()");
         return new AgentBuilder.Transformer.ForAdvice()
                 .include(Loader.class.getClassLoader())
                 .advice(
@@ -44,15 +42,12 @@ public AgentBuilder.Transformer.ForAdvice getTransformer() {

     public static class ApacheKafkaConsumerAdvice {
         public static final SpansContainer spansContainer = SpansContainer.getInstance();
-        public static final LRUCache<Integer, Boolean> handled = new LRUCache<>(1000);
-        public static final LRUCache<Integer, Long> startTimeMap = new LRUCache<>(1000);
+        public static final LRUCache<String, Long> startTimeMap = new LRUCache<>(1000);

         @Advice.OnMethodEnter(suppress = Throwable.class)
-        public static void methodEnter() {
+        public static void methodEnter(@Advice.FieldValue("clientId") String clientId) {
             try {
-                System.out.println("Inside ApacheKafkaConsumerAdvice.methodEnter()");
-                // TODO fix start time
-                // startTimeMap.put(record.hashCode(), System.currentTimeMillis());
+                startTimeMap.put(clientId, System.currentTimeMillis());
             } catch (Exception e) {
                 Logger.error(e);
             }
@@ -62,13 +57,12 @@ public static void methodEnter() {
         public static void methodExit(
                 @Advice.This KafkaConsumer<?, ?> consumer,
                 @Advice.FieldValue("metadata") ConsumerMetadata metadata,
+                @Advice.FieldValue("clientId") String clientId,
                 @Advice.Return ConsumerRecords<?, ?> consumerRecords) {
             try {
-                System.out.println("Inside ApacheKafkaConsumerAdvice.methodExit()");
                 Logger.info("Handling kafka request {}", consumerRecords.hashCode());
                 spansContainer.addKafkaConsumeSpan(
-                        System.currentTimeMillis(), consumer, metadata, consumerRecords);
-                handled.put(consumerRecords.hashCode(), true);
+                        startTimeMap.get(clientId), consumer, metadata, consumerRecords);
             } catch (Throwable error) {
                 Logger.error(error, "Failed to add kafka span");
             }

src/main/java/io/lumigo/core/instrumentation/impl/ApacheKafkaProducerInstrumentation.java (+1 -6)

@@ -25,13 +25,11 @@ public class ApacheKafkaProducerInstrumentation implements LumigoInstrumentation

     @Override
     public ElementMatcher<TypeDescription> getTypeMatcher() {
-        System.out.println("Inside ApacheKafkaProducerInstrumentation.getTypeMatcher()");
         return named("org.apache.kafka.clients.producer.KafkaProducer");
     }

     @Override
     public AgentBuilder.Transformer.ForAdvice getTransformer() {
-        System.out.println("Inside ApacheKafkaProducerInstrumentation.getTransformer()");
         return new AgentBuilder.Transformer.ForAdvice()
                 .include(Loader.class.getClassLoader())
                 .advice(
@@ -63,7 +61,6 @@ public static <K, V> void methodEnter(
             @Advice.Argument(value = 0, readOnly = false) ProducerRecord<K, V> record,
             @Advice.Argument(value = 1, readOnly = false) Callback callback) {
         try {
-            System.out.println("Inside ApacheKafkaProducerAdvice.methodEnter()");
             callback =
                     new KafkaProducerCallback<>(
                             callback,
@@ -98,9 +95,7 @@ public void onCompletion(RecordMetadata recordMetadata, Exception exception) {
         if (callback != null) {
             callback.onCompletion(recordMetadata, exception);
         }
-        System.out.println("Inside KafkaProducerCallback.onCompletion()");
-
-        Logger.info("Handling kafka request {} from host {}", record.hashCode());
+        Logger.info("Handling kafka request {}", record.hashCode());
         spansContainer.addKafkaProduceSpan(
                 startTime,
                 keySerializer,
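Beyond deleting the debug printlns, the last hunk fixes a malformed log call: the old format string had two placeholders ("{} from host {}") but only one argument. The surrounding pattern is the standard callback swap: the enter advice replaces the caller's Callback with a delegating wrapper so the produce span is recorded once the broker acknowledges (or rejects) the send. A stripped-down sketch of that wrapper pattern (hypothetical class, not the tracer's actual KafkaProducerCallback):

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.RecordMetadata;

public class DelegatingCallbackSketch implements Callback {
    private final Callback delegate; // the caller's callback; may be null
    private final long startTime = System.currentTimeMillis();

    public DelegatingCallbackSketch(Callback delegate) {
        this.delegate = delegate;
    }

    @Override
    public void onCompletion(RecordMetadata recordMetadata, Exception exception) {
        if (delegate != null) {
            delegate.onCompletion(recordMetadata, exception); // user code runs first
        }
        // The tracer records the produce span here; exception != null selects
        // the error-response shape defined in KafkaSpan below.
        System.out.println(
                "produce took ~" + (System.currentTimeMillis() - startTime) + " ms");
    }
}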
src/main/java/io/lumigo/models/KafkaSpan.java (+13 -172)
@@ -1,24 +1,13 @@
 package io.lumigo.models;

-import static io.lumigo.core.SpansContainer.KAFKA_SPAN_TYPE;
-
 import com.fasterxml.jackson.annotation.JsonProperty;
 import java.util.*;
-import java.util.stream.Collectors;
 import lombok.AllArgsConstructor;
 import lombok.Builder;
 import lombok.Data;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.clients.consumer.internals.ConsumerMetadata;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.clients.producer.RecordMetadata;
-import org.apache.kafka.clients.producer.internals.ProducerMetadata;
-import org.apache.kafka.common.header.Header;
-import org.apache.kafka.common.header.Headers;
-import org.apache.kafka.common.serialization.Serializer;
+import lombok.Getter;

-@Data
+@Getter
 @Builder(toBuilder = true)
 @AllArgsConstructor
 public class KafkaSpan {
@@ -37,7 +26,7 @@ public class KafkaSpan {
     private Info info;

     @Builder(toBuilder = true)
-    @Data(staticConstructor = "of")
+    @Getter
     public static class Info {
         private KafkaSpan.Tracer tracer;
         private KafkaSpan.TraceId traceId;
@@ -50,15 +39,15 @@ public static class Info {

     @AllArgsConstructor
     @Builder(toBuilder = true)
-    @Data(staticConstructor = "of")
+    @Getter
     public static class TraceId {
         @JsonProperty("Root")
         private String root;
     }

     @AllArgsConstructor
     @Builder(toBuilder = true)
-    @Data(staticConstructor = "of")
+    @Getter
     public static class Tracer {
         private String version;
     }
@@ -67,9 +56,9 @@ public interface KafkaInfo {}

     @AllArgsConstructor
     @Builder(toBuilder = true)
-    @Data(staticConstructor = "of")
+    @Getter
     public static class KafkaProducerInfo implements KafkaInfo {
-        private final String kafkaInfoType = KAFKA_PRODUCER_TYPE;
+        private static final String kafkaInfoType = KAFKA_PRODUCER_TYPE;
         private List<String> bootstrapServers;
         private String topic;
         private KafkaSpan.KafkaProducerRecord record;
@@ -78,7 +67,7 @@ public static class KafkaProducerInfo implements KafkaInfo {

     @AllArgsConstructor
     @Builder(toBuilder = true)
-    @Data(staticConstructor = "of")
+    @Getter
     public static class KafkaProducerRecord {
         private byte[] key;
         private byte[] value;
@@ -89,24 +78,24 @@ public interface KafkaProducerResponse {}

     @AllArgsConstructor
     @Builder(toBuilder = true)
-    @Data(staticConstructor = "of")
+    @Getter
     public static class KafkaProducerSuccessResponse implements KafkaProducerResponse {
         private Integer partition;
         private Long offset;
     }

     @AllArgsConstructor
     @Builder(toBuilder = true)
-    @Data(staticConstructor = "of")
+    @Getter
     public static class KafkaProducerErrorResponse implements KafkaProducerResponse {
         private String errorMessage;
     }

     @AllArgsConstructor
     @Builder(toBuilder = true)
-    @Data(staticConstructor = "of")
+    @Getter
     public static class KafkaConsumerInfo implements KafkaInfo {
-        private final String kafkaInfoType = KAFKA_CONSUMER_TYPE;
+        private static final String kafkaInfoType = KAFKA_CONSUMER_TYPE;
         private List<String> bootstrapServers;
         private String consumerGroupId;
         private Integer recordsCount;
@@ -116,7 +105,7 @@ public static class KafkaConsumerInfo implements KafkaInfo {

     @AllArgsConstructor
     @Builder(toBuilder = true)
-    @Data(staticConstructor = "of")
+    @Getter
     public static class KafkaConsumerRecord {
         private String topic;
         private Integer partition;
@@ -125,152 +114,4 @@ public static class KafkaConsumerRecord {
         private String value;
         private Map<String, byte[]> headers;
     }
-
-    public static <K, V> KafkaSpan createProduce(
-            Span baseSpan,
-            Long startTime,
-            Serializer<K> keySerializer,
-            Serializer<V> valueSerializer,
-            ProducerMetadata producerMetadata,
-            ProducerRecord<K, V> record,
-            RecordMetadata recordMetadata,
-            Exception exception) {
-        List<String> bootstrapServers =
-                producerMetadata.fetch().nodes().stream()
-                        .map(node -> node.host() + ":" + node.port())
-                        .collect(Collectors.toList());
-        String topic = record.topic();
-        KafkaProducerRecord producerRecord =
-                KafkaProducerRecord.builder()
-                        .key(
-                                keySerializer.serialize(
-                                        record.topic(), record.headers(), record.key()))
-                        .value(
-                                valueSerializer.serialize(
-                                        record.topic(), record.headers(), record.value()))
-                        .headers(extractHeaders(record.headers()))
-                        .build();
-
-        KafkaInfo info;
-        if (exception == null) {
-            info =
-                    KafkaSpan.KafkaProducerInfo.builder()
-                            .bootstrapServers(bootstrapServers)
-                            .topic(topic)
-                            .record(producerRecord)
-                            .response(
-                                    KafkaProducerSuccessResponse.builder()
-                                            .partition(recordMetadata.partition())
-                                            .offset(recordMetadata.offset())
-                                            .build())
-                            .build();
-        } else {
-            info =
-                    KafkaProducerInfo.builder()
-                            .bootstrapServers(bootstrapServers)
-                            .topic(topic)
-                            .record(producerRecord)
-                            .response(
-                                    KafkaProducerErrorResponse.builder()
-                                            .errorMessage(exception.getMessage())
-                                            .build())
-                            .build();
-        }
-
-        return new KafkaSpanBuilder()
-                .id(UUID.randomUUID().toString())
-                .started(startTime)
-                .ended(System.currentTimeMillis())
-                .type(KAFKA_SPAN_TYPE)
-                .transactionId(baseSpan.getTransactionId())
-                .account(baseSpan.getAccount())
-                .region(baseSpan.getRegion())
-                .token(baseSpan.getToken())
-                .parentId(baseSpan.getId())
-                .info(
-                        KafkaSpan.Info.builder()
-                                .tracer(
-                                        KafkaSpan.Tracer.builder()
-                                                .version(
-                                                        baseSpan.getInfo().getTracer().getVersion())
-                                                .build())
-                                .traceId(
-                                        KafkaSpan.TraceId.builder()
-                                                .root(baseSpan.getInfo().getTraceId().getRoot())
-                                                .build())
-                                .messageId(
-                                        new String(
-                                                record.headers()
-                                                        .lastHeader("lumigoMessageId")
-                                                        .value()))
-                                .kafkaInfo(info)
-                                .build())
-                .build();
-    }
-
-    public static KafkaSpan createConsume(
-            Span baseSpan,
-            Long startTime,
-            KafkaConsumer<?, ?> consumer,
-            ConsumerMetadata consumerMetadata,
-            ConsumerRecords<?, ?> consumerRecords) {
-        List<String> messageIds = new ArrayList<>();
-        List<String> bootstrapServers =
-                consumerMetadata.fetch().nodes().stream()
-                        .map(node -> node.host() + ":" + node.port())
-                        .collect(Collectors.toList());
-        List<String> topics = new ArrayList<>(consumer.subscription());
-        List<KafkaConsumerRecord> records = new ArrayList<>();
-        consumerRecords.forEach(
-                record -> {
-                    messageIds.add(
-                            new String(record.headers().lastHeader("lumigoMessageId").value()));
-                    records.add(
-                            KafkaConsumerRecord.builder()
-                                    .topic(record.topic())
-                                    .partition(record.partition())
-                                    .offset(record.offset())
-                                    .key(record.key().toString())
-                                    .value(record.value().toString())
-                                    .headers(extractHeaders(record.headers()))
-                                    .build());
-                });
-        return KafkaSpan.builder()
-                .id(UUID.randomUUID().toString())
-                .started(startTime)
-                .ended(System.currentTimeMillis())
-                .type(KAFKA_SPAN_TYPE)
-                .transactionId(baseSpan.getTransactionId())
-                .account(baseSpan.getAccount())
-                .region(baseSpan.getRegion())
-                .token(baseSpan.getToken())
-                .parentId(baseSpan.getId())
-                .info(
-                        Info.builder()
-                                .tracer(
-                                        KafkaSpan.Tracer.builder()
-                                                .version(
-                                                        baseSpan.getInfo().getTracer().getVersion())
-                                                .build())
-                                .traceId(
-                                        KafkaSpan.TraceId.builder()
-                                                .root(baseSpan.getInfo().getTraceId().getRoot())
-                                                .build())
-                                .messageIds(messageIds)
-                                .kafkaInfo(
-                                        KafkaSpan.KafkaConsumerInfo.builder()
-                                                .bootstrapServers(bootstrapServers)
-                                                .consumerGroupId(consumer.groupMetadata().groupId())
-                                                .topics(topics)
-                                                .recordsCount(consumerRecords.count())
-                                                .records(records)
-                                                .build())
-                                .build())
-                .build();
-    }
-
-    private static Map<String, byte[]> extractHeaders(Headers headers) {
-        return Arrays.stream(headers.toArray())
-                .collect(Collectors.toMap(Header::key, Header::value));
-    }
 }
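The switch from @Data to @Getter means Lombok now generates only getters: the span models lose their generated setters, equals/hashCode, and toString, and are effectively immutable outside their builders. Making kafkaInfoType static likewise takes the constant out of per-instance state (class-level @Getter ignores static fields). The unit tests the commit message refers to are not expanded in this view; a test in their spirit might exercise the builder/getter surface like this (a hypothetical JUnit 5 sketch, assuming JUnit 5 is on the test classpath):

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.util.Collections;
import org.junit.jupiter.api.Test;

class KafkaSpanTest {

    @Test
    void consumerRecordBuilderExposesValuesThroughGetters() {
        KafkaSpan.KafkaConsumerRecord record =
                KafkaSpan.KafkaConsumerRecord.builder()
                        .topic("orders")
                        .partition(0)
                        .offset(42L)
                        .key("k")
                        .value("v")
                        .headers(Collections.singletonMap("h", new byte[] {1}))
                        .build();

        // @Getter replaces @Data, so only read access is generated.
        assertEquals("orders", record.getTopic());
        assertEquals(Integer.valueOf(0), record.getPartition());
        assertEquals("v", record.getValue());
    }
}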
