
Commit ab09fce

Implement ExtendedTextMapGetter in kafka-clients instrumentation (#13068)
1 parent 946babb commit ab09fce

7 files changed: +63 −13 lines changed
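
This commit switches the Kafka consumer-record getter from TextMapGetter to ExtendedTextMapGetter, whose getAll method returns every value stored under a carrier key instead of only one, so propagators that accept repeated entries (such as W3C baggage) can read all of them. As a minimal illustrative sketch only (the MultiValueMapGetter name and the Map<String, List<String>> carrier are invented for this example and are not part of the commit), implementing the same interface for a simple multi-value map might look like:

import io.opentelemetry.context.propagation.internal.ExtendedTextMapGetter;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;

// Hypothetical carrier: header name -> all values seen for that name.
enum MultiValueMapGetter implements ExtendedTextMapGetter<Map<String, List<String>>> {
  INSTANCE;

  @Override
  public Iterable<String> keys(Map<String, List<String>> carrier) {
    return carrier.keySet();
  }

  @Override
  @Nullable
  public String get(@Nullable Map<String, List<String>> carrier, String key) {
    // Plain TextMapGetter contract: a single value per key.
    if (carrier == null) {
      return null;
    }
    List<String> values = carrier.get(key);
    return values == null || values.isEmpty() ? null : values.get(0);
  }

  @Override
  public Iterator<String> getAll(@Nullable Map<String, List<String>> carrier, String key) {
    // Extended contract: expose every value so a propagator can merge repeated entries.
    if (carrier == null) {
      return Collections.emptyIterator();
    }
    return carrier.getOrDefault(key, Collections.emptyList()).iterator();
  }
}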

instrumentation/kafka/kafka-clients/kafka-clients-0.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/kafkaclients/v0_11/KafkaClientDefaultTest.java

+5-3
@@ -108,7 +108,7 @@ void testKafkaProducerAndConsumerSpan(boolean testHeaders) throws Exception {
             .hasLinks(LinkData.create(producerSpan.get().getSpanContext()))
             .hasParent(trace.getSpan(0))
             .hasAttributesSatisfyingExactly(
-                processAttributes("10", greeting, testHeaders)),
+                processAttributes("10", greeting, testHeaders, false)),
         span -> span.hasName("processing").hasParent(trace.getSpan(1))));
   }

@@ -152,7 +152,8 @@ void testPassThroughTombstone()
             .hasKind(SpanKind.CONSUMER)
             .hasLinks(LinkData.create(producerSpan.get().getSpanContext()))
             .hasParent(trace.getSpan(0))
-            .hasAttributesSatisfyingExactly(processAttributes(null, null, false))));
+            .hasAttributesSatisfyingExactly(
+                processAttributes(null, null, false, false))));
   }

   @DisplayName("test records(TopicPartition) kafka consume")
@@ -203,6 +204,7 @@ void testRecordsWithTopicPartitionKafkaConsume()
             .hasKind(SpanKind.CONSUMER)
             .hasLinks(LinkData.create(producerSpan.get().getSpanContext()))
             .hasParent(trace.getSpan(0))
-            .hasAttributesSatisfyingExactly(processAttributes(null, greeting, false))));
+            .hasAttributesSatisfyingExactly(
+                processAttributes(null, greeting, false, false))));
   }
 }

instrumentation/kafka/kafka-clients/kafka-clients-0.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/kafkaclients/v0_11/KafkaClientPropagationDisabledTest.java

+2-1
@@ -64,7 +64,8 @@ void testReadRemoteContextWhenPropagationIsDisabled() throws InterruptedException
         span.hasName(SHARED_TOPIC + " process")
             .hasKind(SpanKind.CONSUMER)
             .hasLinks(Collections.emptyList())
-            .hasAttributesSatisfyingExactly(processAttributes(null, message, false)),
+            .hasAttributesSatisfyingExactly(
+                processAttributes(null, message, false, false)),
         span -> span.hasName("processing").hasParent(trace.getSpan(0))));
   }
 }

instrumentation/kafka/kafka-clients/kafka-clients-0.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/kafkaclients/v0_11/KafkaClientSuppressReceiveSpansTest.java

+19-4
@@ -12,6 +12,7 @@
 import io.opentelemetry.instrumentation.kafka.internal.KafkaClientPropagationBaseTest;
 import io.opentelemetry.instrumentation.testing.junit.AgentInstrumentationExtension;
 import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension;
+import java.nio.charset.StandardCharsets;
 import java.time.Duration;
 import java.util.List;
 import java.util.concurrent.ExecutionException;

@@ -33,8 +34,19 @@ void testKafkaProduceAndConsume() throws InterruptedException {
     testing.runWithSpan(
         "parent",
         () -> {
+          ProducerRecord<Integer, String> producerRecord =
+              new ProducerRecord<>(SHARED_TOPIC, 10, greeting);
+          producerRecord
+              .headers()
+              // adding baggage header in w3c baggage format
+              .add(
+                  "baggage",
+                  "test-baggage-key-1=test-baggage-value-1".getBytes(StandardCharsets.UTF_8))
+              .add(
+                  "baggage",
+                  "test-baggage-key-2=test-baggage-value-2".getBytes(StandardCharsets.UTF_8));
           producer.send(
-              new ProducerRecord<>(SHARED_TOPIC, 10, greeting),
+              producerRecord,
               (meta, ex) -> {
                 if (ex == null) {
                   testing.runWithSpan("producer callback", () -> {});

@@ -70,7 +82,8 @@ void testKafkaProduceAndConsume() throws InterruptedException {
         span.hasName(SHARED_TOPIC + " process")
             .hasKind(SpanKind.CONSUMER)
             .hasParent(trace.getSpan(1))
-            .hasAttributesSatisfyingExactly(processAttributes("10", greeting, false)),
+            .hasAttributesSatisfyingExactly(
+                processAttributes("10", greeting, false, true)),
         span ->
             span.hasName("processing")
                 .hasKind(SpanKind.INTERNAL)

@@ -108,7 +121,8 @@ void testPassThroughTombstone()
         span.hasName(SHARED_TOPIC + " process")
             .hasKind(SpanKind.CONSUMER)
             .hasParent(trace.getSpan(0))
-            .hasAttributesSatisfyingExactly(processAttributes(null, null, false))));
+            .hasAttributesSatisfyingExactly(
+                processAttributes(null, null, false, false))));
   }

   @Test
@@ -146,6 +160,7 @@ void testRecordsWithTopicPartitionKafkaConsume()
         span.hasName(SHARED_TOPIC + " process")
             .hasKind(SpanKind.CONSUMER)
             .hasParent(trace.getSpan(0))
-            .hasAttributesSatisfyingExactly(processAttributes(null, greeting, false))));
+            .hasAttributesSatisfyingExactly(
+                processAttributes(null, greeting, false, false))));
   }
 }

instrumentation/kafka/kafka-clients/kafka-clients-0.11/testing/src/main/java/io/opentelemetry/instrumentation/kafka/internal/KafkaClientBaseTest.java

+6-1
@@ -216,7 +216,7 @@ protected static List<AttributeAssertion> receiveAttributes(boolean testHeaders)

   @SuppressWarnings("deprecation") // using deprecated semconv
   protected static List<AttributeAssertion> processAttributes(
-      String messageKey, String messageValue, boolean testHeaders) {
+      String messageKey, String messageValue, boolean testHeaders, boolean testMultiBaggage) {
     List<AttributeAssertion> assertions =
         new ArrayList<>(
             Arrays.asList(

@@ -249,6 +249,11 @@ protected static List<AttributeAssertion> processAttributes(
               AttributeKey.stringArrayKey("messaging.header.test_message_header"),
               Collections.singletonList("test")));
     }
+
+    if (testMultiBaggage) {
+      assertions.add(equalTo(AttributeKey.stringKey("test-baggage-key-1"), "test-baggage-value-1"));
+      assertions.add(equalTo(AttributeKey.stringKey("test-baggage-key-2"), "test-baggage-value-2"));
+    }
     return assertions;
   }
 }

instrumentation/kafka/kafka-clients/kafka-clients-2.6/library/src/test/java/io/opentelemetry/instrumentation/kafkaclients/v2_6/AbstractInterceptorsTest.java

+13-1
@@ -10,6 +10,7 @@
 import io.opentelemetry.instrumentation.kafka.internal.KafkaClientBaseTest;
 import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension;
 import io.opentelemetry.instrumentation.testing.junit.LibraryInstrumentationExtension;
+import java.nio.charset.StandardCharsets;
 import java.time.Duration;
 import java.util.Map;
 import org.apache.kafka.clients.consumer.ConsumerConfig;

@@ -48,8 +49,19 @@ void testInterceptors() throws InterruptedException {
     testing.runWithSpan(
         "parent",
         () -> {
+          ProducerRecord<Integer, String> producerRecord =
+              new ProducerRecord<>(SHARED_TOPIC, greeting);
+          producerRecord
+              .headers()
+              // adding baggage header in w3c baggage format
+              .add(
+                  "baggage",
+                  "test-baggage-key-1=test-baggage-value-1".getBytes(StandardCharsets.UTF_8))
+              .add(
+                  "baggage",
+                  "test-baggage-key-2=test-baggage-value-2".getBytes(StandardCharsets.UTF_8));
           producer.send(
-              new ProducerRecord<>(SHARED_TOPIC, greeting),
+              producerRecord,
               (meta, ex) -> {
                 if (ex == null) {
                   testing.runWithSpan("producer callback", () -> {});

instrumentation/kafka/kafka-clients/kafka-clients-2.6/library/src/test/java/io/opentelemetry/instrumentation/kafkaclients/v2_6/InterceptorsSuppressReceiveSpansTest.java

+8-1
@@ -15,6 +15,7 @@
 import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION;
 import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_SYSTEM;

+import io.opentelemetry.api.common.AttributeKey;
 import io.opentelemetry.api.trace.SpanKind;
 import java.nio.charset.StandardCharsets;
 import org.assertj.core.api.AbstractLongAssert;

@@ -59,7 +60,13 @@ void assertTraces() {
                 equalTo(MESSAGING_KAFKA_CONSUMER_GROUP, "test"),
                 satisfies(
                     MESSAGING_CLIENT_ID,
-                    stringAssert -> stringAssert.startsWith("consumer"))),
+                    stringAssert -> stringAssert.startsWith("consumer")),
+                equalTo(
+                    AttributeKey.stringKey("test-baggage-key-1"),
+                    "test-baggage-value-1"),
+                equalTo(
+                    AttributeKey.stringKey("test-baggage-key-2"),
+                    "test-baggage-value-2")),
         span ->
             span.hasName("process child")
                 .hasKind(SpanKind.INTERNAL)

instrumentation/kafka/kafka-clients/kafka-clients-common/library/src/main/java/io/opentelemetry/instrumentation/kafka/internal/KafkaConsumerRecordGetter.java

+10-2
@@ -5,14 +5,15 @@

 package io.opentelemetry.instrumentation.kafka.internal;

-import io.opentelemetry.context.propagation.TextMapGetter;
+import io.opentelemetry.context.propagation.internal.ExtendedTextMapGetter;
 import java.nio.charset.StandardCharsets;
+import java.util.Iterator;
 import java.util.stream.Collectors;
 import java.util.stream.StreamSupport;
 import javax.annotation.Nullable;
 import org.apache.kafka.common.header.Header;

-enum KafkaConsumerRecordGetter implements TextMapGetter<KafkaProcessRequest> {
+enum KafkaConsumerRecordGetter implements ExtendedTextMapGetter<KafkaProcessRequest> {
   INSTANCE;

   @Override

@@ -35,4 +36,11 @@ public String get(@Nullable KafkaProcessRequest carrier, String key) {
     }
     return new String(value, StandardCharsets.UTF_8);
   }
+
+  @Override
+  public Iterator<String> getAll(@Nullable KafkaProcessRequest carrier, String key) {
+    return StreamSupport.stream(carrier.getRecord().headers().headers(key).spliterator(), false)
+        .map(header -> new String(header.value(), StandardCharsets.UTF_8))
+        .iterator();
+  }
 }
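
The new getAll override above iterates every Kafka header stored under the requested key. As a rough, hypothetical usage sketch (the BaggageHeadersDemo class, topic name, and record values are made up for illustration), this is the Kafka Headers behavior it relies on, where add permits the same key to appear more than once and headers(key) returns all of those entries:

import java.nio.charset.StandardCharsets;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.header.Header;

class BaggageHeadersDemo {
  public static void main(String[] args) {
    // A consumer record carrying two "baggage" headers, mirroring the test setup above.
    ConsumerRecord<Integer, String> record =
        new ConsumerRecord<>("demo-topic", 0, 0L, 10, "hello");
    record
        .headers()
        .add("baggage", "test-baggage-key-1=test-baggage-value-1".getBytes(StandardCharsets.UTF_8))
        .add("baggage", "test-baggage-key-2=test-baggage-value-2".getBytes(StandardCharsets.UTF_8));

    // headers(key) yields every header with that key; getAll streams over exactly this iterable,
    // so a propagator reading "baggage" now sees both entries instead of only the first.
    for (Header header : record.headers().headers("baggage")) {
      System.out.println(new String(header.value(), StandardCharsets.UTF_8));
    }
  }
}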
