Skip to content

Commit

Permalink
WIP: add tests for the kafka-clients 3.1+ instrumentation
Browse files Browse the repository at this point in the history
  • Loading branch information
nayeem-kamal committed Sep 16, 2024
1 parent 235737f commit a6829ea
Show file tree
Hide file tree
Showing 20 changed files with 1,586 additions and 0 deletions.
59 changes: 59 additions & 0 deletions dd-java-agent/instrumentation/kafka-clients-3.1/build.gradle
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
// Muzzle directive: declares this instrumentation safe to apply to
// kafka-clients 3.1.0 and newer. assertInverse is disabled, so the build
// does NOT verify that the instrumentation fails to apply below 3.1.0.
muzzle {
pass {
group = "org.apache.kafka"
module = "kafka-clients"
versions = "[3.1.0,)"
assertInverse = false
}
}

apply from: "$rootDir/gradle/java.gradle"

// Extra test suites: 'latestDepTest' runs against the newest resolvable
// dependency versions; 'iastLatestDepTest3' exercises the IAST path.
addTestSuite('latestDepTest')
addTestSuite('iastLatestDepTest3')


// spring-kafka 3.x and spring-kafka-test require a Java 17 toolchain.
java {
toolchain {
languageVersion.set(JavaLanguageVersion.of(17))
}
}
dependencies {
compileOnly group: 'org.apache.kafka', name: 'kafka-clients', version: '3.1.0'
implementation project(':dd-java-agent:instrumentation:kafka-common')

testImplementation group: 'org.apache.kafka', name: 'kafka-clients', version: '3.1.0'
// NOTE(review): spring-kafka 3.1.0 is built against a newer kafka-clients
// than 3.1.0 — confirm this pinned combination actually resolves and runs.
testImplementation group: 'org.springframework.kafka', name: 'spring-kafka', version: '3.1.0'
testImplementation group: 'org.springframework.kafka', name: 'spring-kafka-test', version: '3.1.0'
testImplementation group: 'org.testcontainers', name: 'kafka', version: '1.17.0'
testImplementation group: 'javax.xml.bind', name: 'jaxb-api', version: '2.2.3'
testImplementation group: 'org.assertj', name: 'assertj-core', version: '2.9.+'
testImplementation group: 'org.mockito', name: 'mockito-core', version: '2.19.0'
testRuntimeOnly project(':dd-java-agent:instrumentation:spring-scheduling-3.1')
testImplementation(testFixtures(project(':dd-java-agent:agent-iast')))

//IAST
// Runtime-only instrumentation modules needed for the IAST test suites to
// observe tainted data flowing through java.lang / java.io / Jackson.
testRuntimeOnly project(':dd-java-agent:instrumentation:iast-instrumenter')
testRuntimeOnly project(':dd-java-agent:instrumentation:java-lang')
testRuntimeOnly project(':dd-java-agent:instrumentation:java-io')
testRuntimeOnly project(':dd-java-agent:instrumentation:jackson-core')
testImplementation(group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.10')


// Include latest version of kafka itself along with latest version of client libs.
// This seems to help with jar compatibility hell.
// NOTE(review): kafka/kafka-clients are pinned to '2.+' here, which is BELOW
// the muzzle range [3.1.0,) declared above — presumably copied from an older
// kafka-clients module; verify these should not be '3.+' (or '+').
latestDepTestImplementation group: 'org.apache.kafka', name: 'kafka_2.13', version: '2.+'
latestDepTestImplementation group: 'org.apache.kafka', name: 'kafka-clients', version: '2.+'
latestDepTestImplementation group: 'org.springframework.kafka', name: 'spring-kafka', version: '3.+'
latestDepTestImplementation group: 'org.springframework.kafka', name: 'spring-kafka-test', version: '3.+'
latestDepTestImplementation group: 'org.assertj', name: 'assertj-core', version: '3.19.+'
latestDepTestImplementation libs.guava

}

configurations.testRuntimeClasspath {
// spock-core depends on assertj version that is not compatible with kafka-clients
resolutionStrategy.force 'org.assertj:assertj-core:2.9.1'
}


Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
package datadog.trace.instrumentation.kafka_clients38;

import java.util.Objects;
import javax.annotation.Nullable;
import org.apache.kafka.clients.consumer.ConsumerGroupMetadata;

/**
 * Immutable holder for metadata about a {@code KafkaConsumer} — its consumer group, its
 * {@link ConsumerGroupMetadata} (only captured when data streams monitoring is enabled), and the
 * comma-joined bootstrap servers list. Instances are created at consumer construction time and
 * stored in the instrumentation context store for later lookup during {@code poll}.
 *
 * <p>All fields may be {@code null}; equality and hashing treat {@code null} fields as equal.
 */
public class KafkaConsumerInfo {
  private final String consumerGroup;
  private final ConsumerGroupMetadata clientMetadata;
  private final String bootstrapServers;

  /**
   * Creates an info object carrying group metadata (used when data streams monitoring is on).
   *
   * @param consumerGroup the consumer group id, possibly {@code null}
   * @param clientMetadata the consumer's group metadata, possibly {@code null}
   * @param bootstrapServers comma-joined bootstrap server list, possibly {@code null}
   */
  public KafkaConsumerInfo(
      String consumerGroup, ConsumerGroupMetadata clientMetadata, String bootstrapServers) {
    this.consumerGroup = consumerGroup;
    this.clientMetadata = clientMetadata;
    this.bootstrapServers = bootstrapServers;
  }

  /**
   * Creates an info object without group metadata (used when data streams monitoring is off).
   *
   * @param consumerGroup the consumer group id, possibly {@code null}
   * @param bootstrapServers comma-joined bootstrap server list, possibly {@code null}
   */
  public KafkaConsumerInfo(String consumerGroup, String bootstrapServers) {
    this.consumerGroup = consumerGroup;
    this.clientMetadata = null;
    this.bootstrapServers = bootstrapServers;
  }

  @Nullable
  public String getConsumerGroup() {
    return consumerGroup;
  }

  @Nullable
  public ConsumerGroupMetadata getClientMetadata() {
    return clientMetadata;
  }

  @Nullable
  public String getBootstrapServers() {
    return bootstrapServers;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    KafkaConsumerInfo consumerInfo = (KafkaConsumerInfo) o;
    // Fix: include bootstrapServers — previously two infos for the same group but
    // different clusters compared equal, making equals inconsistent with the state
    // exposed through getBootstrapServers().
    return Objects.equals(consumerGroup, consumerInfo.consumerGroup)
        && Objects.equals(clientMetadata, consumerInfo.clientMetadata)
        && Objects.equals(bootstrapServers, consumerInfo.bootstrapServers);
  }

  @Override
  public int hashCode() {
    // Must cover exactly the fields compared in equals() to keep the contract.
    return Objects.hash(consumerGroup, clientMetadata, bootstrapServers);
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,191 @@
package datadog.trace.instrumentation.kafka_clients38;

import static datadog.trace.agent.tooling.bytebuddy.matcher.HierarchyMatchers.extendsClass;
import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.nameStartsWith;
import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named;
import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activateSpan;
import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan;
import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan;
import static datadog.trace.bootstrap.instrumentation.api.InstrumentationTags.KAFKA_RECORDS_COUNT;
import static datadog.trace.instrumentation.kafka_clients38.KafkaDecorator.KAFKA_POLL;
import static net.bytebuddy.matcher.ElementMatchers.isConstructor;
import static net.bytebuddy.matcher.ElementMatchers.isMethod;
import static net.bytebuddy.matcher.ElementMatchers.isPublic;
import static net.bytebuddy.matcher.ElementMatchers.returns;
import static net.bytebuddy.matcher.ElementMatchers.takesArgument;
import static net.bytebuddy.matcher.ElementMatchers.takesArguments;

import com.google.auto.service.AutoService;
import datadog.trace.agent.tooling.Instrumenter;
import datadog.trace.agent.tooling.InstrumenterModule;
import datadog.trace.api.Config;
import datadog.trace.bootstrap.InstrumentationContext;
import datadog.trace.bootstrap.instrumentation.api.AgentScope;
import datadog.trace.bootstrap.instrumentation.api.AgentSpan;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import net.bytebuddy.asm.Advice;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.matcher.ElementMatcher;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerGroupMetadata;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

/**
* This instrumentation saves additional information from the KafkaConsumer, such as consumer group
* and cluster ID, in the context store for later use.
*/
@AutoService(InstrumenterModule.class)
public final class KafkaConsumerInfoInstrumentation extends InstrumenterModule.Tracing
implements Instrumenter.ForTypeHierarchy {

// Registers under the shared "kafka" instrumentation name, so it is toggled
// together with the other kafka instrumentations.
public KafkaConsumerInfoInstrumentation() {
super("kafka");
}

// Declares the context-store key/value class pairs this instrumentation uses:
// KafkaConsumer -> KafkaConsumerInfo (written in ConstructorAdvice) and
// ConsumerRecords -> KafkaConsumerInfo (written in RecordsAdvice).
// NOTE(review): the Metadata->String and ConsumerCoordinator->KafkaConsumerInfo
// entries are not written anywhere in this file — the coordinator write below is
// commented out; confirm whether these registrations are still needed.
@Override
public Map<String, String> contextStore() {
Map<String, String> contextStores = new HashMap<>();
contextStores.put("org.apache.kafka.clients.Metadata", "java.lang.String");
contextStores.put(
"org.apache.kafka.clients.consumer.ConsumerRecords", KafkaConsumerInfo.class.getName());
contextStores.put(
"org.apache.kafka.clients.consumer.internals.ConsumerCoordinator",
KafkaConsumerInfo.class.getName());
contextStores.put(
"org.apache.kafka.clients.consumer.KafkaConsumer", KafkaConsumerInfo.class.getName());
return contextStores;
}

// Fast pre-check: only classloaders that can see KafkaConsumer are considered.
@Override
public String hierarchyMarkerType() {
return "org.apache.kafka.clients.consumer.KafkaConsumer";
}

// Matches KafkaConsumer itself and any subclass whose name starts with the
// marker type's name.
@Override
public ElementMatcher<TypeDescription> hierarchyMatcher() {
return extendsClass(nameStartsWith(hierarchyMarkerType()));
}

// Helper classes injected into the application classloader so the advice
// bodies can reference them.
@Override
public String[] helperClassNames() {
return new String[] {
packageName + ".KafkaDecorator", packageName + ".KafkaConsumerInfo",
};
}

// Wires ConstructorAdvice onto the KafkaConsumer(ConsumerConfig, Deserializer,
// Deserializer) constructor and RecordsAdvice onto the single-argument public
// poll(...) overloads that return ConsumerRecords.
@Override
public void methodAdvice(MethodTransformer transformer) {
transformer.applyAdvice(
isConstructor()
.and(takesArgument(0, named("org.apache.kafka.clients.consumer.ConsumerConfig")))
.and(takesArgument(1, named("org.apache.kafka.common.serialization.Deserializer")))
.and(takesArgument(2, named("org.apache.kafka.common.serialization.Deserializer"))),
KafkaConsumerInfoInstrumentation.class.getName() + "$ConstructorAdvice");

transformer.applyAdvice(
isMethod()
.and(isPublic())
.and(named("poll"))
.and(takesArguments(1))
.and(returns(named("org.apache.kafka.clients.consumer.ConsumerRecords"))),
KafkaConsumerInfoInstrumentation.class.getName() + "$RecordsAdvice");
}

/**
 * Runs after the KafkaConsumer constructor: captures the consumer group (from config, falling
 * back to group metadata) and the bootstrap servers, and stores them in the context store keyed
 * by the consumer instance.
 */
public static class ConstructorAdvice {
@Advice.OnMethodExit(suppress = Throwable.class)
public static void captureGroup(
@Advice.This KafkaConsumer consumer, @Advice.Argument(0) ConsumerConfig consumerConfig) {
ConsumerGroupMetadata groupMetadata = consumer.groupMetadata();

// Treat an empty group id the same as an absent one.
String consumerGroup = consumerConfig.getString(ConsumerConfig.GROUP_ID_CONFIG);
String normalizedConsumerGroup =
consumerGroup != null && !consumerGroup.isEmpty() ? consumerGroup : null;

// Fall back to the group id reported by the consumer's group metadata.
if (normalizedConsumerGroup == null) {
if (groupMetadata != null) {
normalizedConsumerGroup = groupMetadata.groupId();
}
}
// Flatten the configured bootstrap server list into a single comma-joined string.
List<String> bootstrapServersList =
consumerConfig.getList(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG);
String bootstrapServers = null;
if (bootstrapServersList != null && !bootstrapServersList.isEmpty()) {
bootstrapServers = String.join(",", bootstrapServersList);
}

// Group metadata is only retained when data streams monitoring is enabled.
KafkaConsumerInfo kafkaConsumerInfo;
if (Config.get().isDataStreamsEnabled()) {
kafkaConsumerInfo =
new KafkaConsumerInfo(normalizedConsumerGroup, groupMetadata, bootstrapServers);
} else {
kafkaConsumerInfo = new KafkaConsumerInfo(normalizedConsumerGroup, bootstrapServers);
}

// Only store the info if it carries something useful.
if (kafkaConsumerInfo.getConsumerGroup() != null
|| kafkaConsumerInfo.getClientMetadata() != null) {
InstrumentationContext.get(KafkaConsumer.class, KafkaConsumerInfo.class)
.put(consumer, kafkaConsumerInfo);
// NOTE(review): WIP — the ConsumerCoordinator store write below is disabled,
// but the contextStore() registration for it remains; reconcile before merge.
// if (coordinator != null) {
// InstrumentationContext.get(ConsumerCoordinator.class, KafkaConsumerInfo.class)
// .put(coordinator, kafkaConsumerInfo);
// }
}
}

public static void muzzleCheck(ConsumerRecord record) {
// KafkaConsumerInstrumentation only applies for kafka versions with headers
// Make an explicit call so KafkaConsumerGroupInstrumentation does the same
record.headers();
}
}

/**
 * this method transfers the consumer group from the KafkaConsumer class key to the
 * ConsumerRecords key. This is necessary because in the poll method, we don't have access to the
 * KafkaConsumer class.
 */
public static class RecordsAdvice {
// Starts a kafka.poll span only when data streams is enabled (checked on the
// active trace config when a span is active, otherwise on the global config).
// Returns null when no span was started; onExit handles both cases.
@Advice.OnMethodEnter(suppress = Throwable.class)
public static AgentScope onEnter() {
boolean dataStreamsEnabled;
if (activeSpan() != null) {
dataStreamsEnabled = activeSpan().traceConfig().isDataStreamsEnabled();
} else {
dataStreamsEnabled = Config.get().isDataStreamsEnabled();
}
if (dataStreamsEnabled) {
final AgentSpan span = startSpan(KAFKA_POLL);
return activateSpan(span);
}
return null;
}

// After poll(): copies the consumer's KafkaConsumerInfo onto the returned
// ConsumerRecords, then (if a span was started in onEnter) tags it with the
// record count and finishes it.
@Advice.OnMethodExit(suppress = Throwable.class)
public static void captureGroup(
@Advice.Enter final AgentScope scope,
@Advice.This KafkaConsumer consumer,
@Advice.Return ConsumerRecords records) {
int recordsCount = 0;
if (records != null) {
KafkaConsumerInfo kafkaConsumerInfo =
InstrumentationContext.get(KafkaConsumer.class, KafkaConsumerInfo.class).get(consumer);
if (kafkaConsumerInfo != null) {
InstrumentationContext.get(ConsumerRecords.class, KafkaConsumerInfo.class)
.put(records, kafkaConsumerInfo);
}
recordsCount = records.count();
}
if (scope == null) {
return;
}
AgentSpan span = scope.span();
span.setTag(KAFKA_RECORDS_COUNT, recordsCount);
span.finish();
scope.close();
}
}
}
Loading

0 comments on commit a6829ea

Please sign in to comment.