 import net.bytebuddy.asm.Advice;
 import net.bytebuddy.description.type.TypeDescription;
 import net.bytebuddy.matcher.ElementMatcher;
+import org.apache.kafka.clients.consumer.internals.ConsumerMetadata;
 import org.apache.kafka.clients.producer.Callback;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.clients.producer.RecordMetadata;
+import org.apache.kafka.clients.producer.internals.ProducerMetadata;
 import org.pmw.tinylog.Logger;
 
 import java.util.UUID;
@@ -24,13 +26,13 @@ public class ApacheKafkaProducerInstrumentation implements LumigoInstrumentation
 
     @Override
     public ElementMatcher<TypeDescription> getTypeMatcher() {
-        System.out.println("Inside ApacheKafkaInstrumentation.getTypeMatcher()");
+        System.out.println("Inside ApacheKafkaProducerInstrumentation.getTypeMatcher()");
         return named("org.apache.kafka.clients.producer.KafkaProducer");
     }
 
     @Override
     public AgentBuilder.Transformer.ForAdvice getTransformer() {
-        System.out.println("Inside ApacheKafkaInstrumentation.getTransformer()");
+        System.out.println("Inside ApacheKafkaProducerInstrumentation.getTransformer()");
         return new AgentBuilder.Transformer.ForAdvice()
                 .include(Loader.class.getClassLoader())
                 .advice(
@@ -39,25 +41,23 @@ public AgentBuilder.Transformer.ForAdvice getTransformer() {
                         .and(named("send"))
                         .and(takesArgument(0, named("org.apache.kafka.clients.producer.ProducerRecord"))
                         .and(takesArgument(1, named("org.apache.kafka.clients.producer.Callback")))),
-                ApacheKafkaAdvice.class.getName());
+                ApacheKafkaProducerAdvice.class.getName());
     }
 
-    public static class ApacheKafkaAdvice {
+    public static class ApacheKafkaProducerAdvice {
         public static final SpansContainer spansContainer = SpansContainer.getInstance();
         public static final LRUCache<Integer, Boolean> handled = new LRUCache<>(1000);
-        public static final LRUCache<Integer, Long> startTimeMap = new LRUCache<>(1000);
 
         @Advice.OnMethodEnter
         public static void methodEnter(
-                @Advice.Argument(value = 0, readOnly = false) ProducerRecord record,
+                @Advice.FieldValue("metadata") ProducerMetadata metadata,
+                @Advice.Argument(value = 0, readOnly = false) ProducerRecord<?, ?> record,
                 @Advice.Argument(value = 1, readOnly = false) Callback callback) {
             try {
-                System.out.println("Inside ApacheKafkaAdvice.methodEnter()");
-                startTimeMap.put(record.hashCode(), System.currentTimeMillis());
-                callback = new KafkaProducerCallback(callback, record);
+                System.out.println("Inside ApacheKafkaProducerAdvice.methodEnter()");
+                callback = new KafkaProducerCallback(callback, metadata, record, System.currentTimeMillis());
 
                 // Try to inject correlation id to the kafka record headers
-                // TODO dd injecting time in queue to the record headers
                 record.headers().add("lumigoMessageId", UUID.randomUUID().toString().substring(0, 10).getBytes());
             } catch (Exception e) {
                 Logger.error(e);
@@ -67,7 +67,9 @@ public static void methodEnter(
         @AllArgsConstructor
         public static class KafkaProducerCallback implements Callback {
             private final Callback callback;
-            private final ProducerRecord record;
+            private final ProducerMetadata producerMetadata;
+            private final ProducerRecord<?, ?> record;
+            private final long startTime;
 
             @Override
             public void onCompletion(RecordMetadata recordMetadata, Exception exception) {
@@ -76,13 +78,10 @@ public void onCompletion(RecordMetadata recordMetadata, Exception exception) {
                         callback.onCompletion(recordMetadata, exception);
                     }
                     System.out.println("Inside KafkaProducerCallback.onCompletion()");
-                    if (handled.get(record.hashCode()) == null) {
-                        Logger.info("Handling kafka request {} from host {}", record.hashCode());
-                        spansContainer.addKafkaProduceSpan(startTimeMap.get(record.hashCode()), record);
-                        handled.put(record.hashCode(), true);
-                    } else {
-                        Logger.warn("Already handle kafka request {} for host {}", record.hashCode());
-                    }
+
+                    Logger.info("Handling kafka request {} from host {}", record.hashCode());
+                    spansContainer.addKafkaProduceSpan(startTime, producerMetadata, record, recordMetadata, exception);
+                    handled.put(record.hashCode(), true);
                 } catch (Throwable error) {
                     Logger.error(error, "Failed to add kafka span");
                 }
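
For readers unfamiliar with the Byte Buddy mechanics this change leans on, the sketch below is a minimal, self-contained illustration and not part of the commit: the Producer, TimingCallback, and SendAdvice names are invented stand-ins, not the Lumigo or Kafka classes. It shows the same two bindings the new advice uses: @Advice.FieldValue("metadata") reads a private instance field of the instrumented class, and a non-readOnly @Advice.Argument lets the entry advice replace the callback argument with a wrapper that carries the field value and the capture time. It assumes byte-buddy and byte-buddy-agent are on the classpath.

import net.bytebuddy.agent.ByteBuddyAgent;
import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.asm.Advice;

import static net.bytebuddy.matcher.ElementMatchers.named;
import static net.bytebuddy.matcher.ElementMatchers.nameEndsWith;

public class FieldValueAdviceSketch {

    // Toy stand-in for KafkaProducer: a private field plus a send(record, callback) method.
    public static class Producer {
        private final String metadata = "cluster-metadata"; // plays the role of the private ProducerMetadata field

        public void send(String record, Runnable callback) {
            callback.run();
        }
    }

    // Wrapper analogous to KafkaProducerCallback in the diff: it carries the field value
    // and the start time captured at method entry, then delegates to the original callback.
    public static class TimingCallback implements Runnable {
        private final Runnable delegate;
        private final String metadata;
        private final long startTime;

        public TimingCallback(Runnable delegate, String metadata, long startTime) {
            this.delegate = delegate;
            this.metadata = metadata;
            this.startTime = startTime;
        }

        @Override
        public void run() {
            delegate.run();
            System.out.println("completed for " + metadata + " after "
                    + (System.currentTimeMillis() - startTime) + " ms");
        }
    }

    public static class SendAdvice {
        @Advice.OnMethodEnter
        public static void enter(
                @Advice.FieldValue("metadata") String metadata,                    // reads Producer.metadata, even though it is private
                @Advice.Argument(value = 1, readOnly = false) Runnable callback) { // writable binding: the advice can swap the argument
            callback = new TimingCallback(callback, metadata, System.currentTimeMillis());
        }
    }

    public static void main(String[] args) {
        ByteBuddyAgent.install(); // self-attach; requires the byte-buddy-agent dependency
        new AgentBuilder.Default()
                .type(nameEndsWith("$Producer")) // match by name so Producer is not loaded before installation
                .transform(new AgentBuilder.Transformer.ForAdvice()
                        .include(FieldValueAdviceSketch.class.getClassLoader())
                        .advice(named("send"), SendAdvice.class.getName()))
                .installOnByteBuddyAgent();

        new Producer().send("hello", () -> System.out.println("original callback"));
    }
}

The diff makes the same move: the per-send start time and the producer metadata now travel inside KafkaProducerCallback itself, so the startTimeMap LRU cache keyed by record.hashCode() is no longer needed.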