
Commit 0467411

enable pyright for aiokafka, fix key type
1 parent 3a05425 commit 0467411

8 files changed: +76 -42 lines

instrumentation/opentelemetry-instrumentation-aiokafka/pyproject.toml

+1

@@ -29,6 +29,7 @@ dependencies = [
   "opentelemetry-api ~= 1.27",
   "opentelemetry-instrumentation == 0.54b0.dev",
   "opentelemetry-semantic-conventions == 0.54b0.dev",
+  "typing_extensions ~= 4.1",
 ]

 [project.optional-dependencies]
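The new typing_extensions pin backs the Unpack import used in __init__.py below: typing.Unpack only exists from Python 3.11, while this repository still type-checks against Python 3.8 (see the root pyproject.toml change further down). A minimal sketch of the pattern, assuming an illustrative Options TypedDict that is not part of the package:

from __future__ import annotations

from typing import TYPE_CHECKING, TypedDict

if TYPE_CHECKING:
    # Imported only for the type checker, so there is no hard runtime
    # requirement that the symbol exist in `typing` itself.
    from typing_extensions import Unpack


class Options(TypedDict, total=False):  # illustrative, not from the package
    retries: int
    timeout: float


def configure(**kwargs: Unpack[Options]) -> None:
    # Pyright flags unknown keys or wrongly typed values at call sites.
    print(kwargs)


configure(retries=3, timeout=1.5)  # OK
# configure(retires=3)             # pyright error: unknown keyword "retires"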

instrumentation/opentelemetry-instrumentation-aiokafka/src/opentelemetry/instrumentation/aiokafka/__init__.py

+23 -4

@@ -69,11 +69,15 @@ async def async_consume_hook(span, record, args, kwargs):
 ___
 """

+from __future__ import annotations
+
 from asyncio import iscoroutinefunction
-from typing import Collection
+from typing import TYPE_CHECKING, Collection

 import aiokafka
-from wrapt import wrap_function_wrapper
+from wrapt import (
+    wrap_function_wrapper,  # type: ignore[reportUnknownVariableType]
+)

 from opentelemetry import trace
 from opentelemetry.instrumentation.aiokafka.package import _instruments
@@ -87,6 +91,21 @@ async def async_consume_hook(span, record, args, kwargs):
 from opentelemetry.instrumentation.utils import unwrap
 from opentelemetry.semconv.schemas import Schemas

+if TYPE_CHECKING:
+    from typing import TypedDict
+
+    from typing_extensions import Unpack
+
+    from .utils import ConsumeHookT, ProduceHookT
+
+    class InstrumentKwargs(TypedDict, total=False):
+        tracer_provider: trace.TracerProvider
+        async_produce_hook: ProduceHookT
+        async_consume_hook: ConsumeHookT
+
+    class UninstrumentKwargs(TypedDict, total=False):
+        pass
+

 class AIOKafkaInstrumentor(BaseInstrumentor):
     """An instrumentor for kafka module
@@ -96,7 +115,7 @@ class AIOKafkaInstrumentor(BaseInstrumentor):
     def instrumentation_dependencies(self) -> Collection[str]:
         return _instruments

-    def _instrument(self, **kwargs):
+    def _instrument(self, **kwargs: Unpack[InstrumentKwargs]):
         """Instruments the kafka module

         Args:
@@ -138,7 +157,7 @@ def _instrument(self, **kwargs):
             _wrap_getmany(tracer, async_consume_hook),
         )

-    def _uninstrument(self, **kwargs):
+    def _uninstrument(self, **kwargs: Unpack[UninstrumentKwargs]):
         unwrap(aiokafka.AIOKafkaProducer, "send")
         unwrap(aiokafka.AIOKafkaConsumer, "getone")
         unwrap(aiokafka.AIOKafkaConsumer, "getmany")
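With Unpack[InstrumentKwargs], pyright can check the keyword arguments that callers forward through BaseInstrumentor.instrument(). A minimal usage sketch; the hook signatures follow ProduceHookT and ConsumeHookT from utils.py, and the span attribute names are illustrative only:

from opentelemetry.instrumentation.aiokafka import AIOKafkaInstrumentor


async def produce_hook(span, args, kwargs):
    # Awaited inside the producer "send" span.
    span.set_attribute("app.produce_hook.called", True)  # illustrative attribute


async def consume_hook(span, record, args, kwargs):
    # Awaited inside the consumer span with the received ConsumerRecord.
    span.set_attribute("app.consume_hook.called", True)  # illustrative attribute


AIOKafkaInstrumentor().instrument(
    async_produce_hook=produce_hook,
    async_consume_hook=consume_hook,
)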

instrumentation/opentelemetry-instrumentation-aiokafka/src/opentelemetry/instrumentation/aiokafka/py.typed

Whitespace-only changes.

instrumentation/opentelemetry-instrumentation-aiokafka/src/opentelemetry/instrumentation/aiokafka/utils.py

+46 -37

@@ -1,6 +1,7 @@
 from __future__ import annotations

 import asyncio
+import contextlib
 import json
 from logging import getLogger
 from typing import (
@@ -14,6 +15,7 @@
     Protocol,
     Sequence,
     Tuple,
+    cast,
 )

 import aiokafka
@@ -56,19 +58,18 @@ async def __call__(
         headers: HeadersT | None = None,
     ) -> asyncio.Future[RecordMetadata]: ...

-ProduceHookT = Optional[
-    Callable[[Span, Tuple[Any, ...], Dict[str, Any]], Awaitable[None]]
+ProduceHookT = Callable[
+    [Span, Tuple[Any, ...], Dict[str, Any]], Awaitable[None]
 ]
-ConsumeHookT = Optional[
-    Callable[
-        [
-            Span,
-            aiokafka.ConsumerRecord[object, object],
-            Tuple[aiokafka.TopicPartition, ...],
-            Dict[str, Any],
-        ],
-        Awaitable[None],
-    ]
+
+ConsumeHookT = Callable[
+    [
+        Span,
+        aiokafka.ConsumerRecord[object, object],
+        Tuple[aiokafka.TopicPartition, ...],
+        Dict[str, Any],
+    ],
+    Awaitable[None],
 ]

 HeadersT = Sequence[Tuple[str, Optional[bytes]]]
@@ -89,7 +90,7 @@ def _extract_client_id(client: aiokafka.AIOKafkaClient) -> str:
 def _extract_consumer_group(
     consumer: aiokafka.AIOKafkaConsumer,
 ) -> str | None:
-    return consumer._group_id
+    return consumer._group_id  # type: ignore[reportUnknownVariableType]


 def _extract_argument(
@@ -139,6 +140,17 @@ def _move_headers_to_kwargs(
     return args[:5], kwargs


+def _deserialize_key(key: object | None) -> str | None:
+    if key is None:
+        return None
+
+    if isinstance(key, bytes):
+        with contextlib.suppress(UnicodeDecodeError):
+            return key.decode()
+
+    return str(key)
+
+
 async def _extract_send_partition(
     instance: aiokafka.AIOKafkaProducer,
     args: tuple[Any, ...],
@@ -150,17 +162,20 @@ async def _extract_send_partition(
         key = _extract_send_key(args, kwargs)
         value = _extract_send_value(args, kwargs)
         partition = _extract_argument("partition", 3, None, args, kwargs)
-        key_bytes, value_bytes = instance._serialize(topic, key, value)
+        key_bytes, value_bytes = cast(
+            "tuple[bytes | None, bytes | None]",
+            instance._serialize(topic, key, value),  # type: ignore[reportUnknownMemberType]
+        )
         valid_types = (bytes, bytearray, memoryview, type(None))
         if (
             type(key_bytes) not in valid_types
             or type(value_bytes) not in valid_types
         ):
             return None

-        await instance.client._wait_on_metadata(topic)
+        await instance.client._wait_on_metadata(topic)  # type: ignore[reportUnknownMemberType]

-        return instance._partition(
+        return instance._partition(  # type: ignore[reportUnknownMemberType]
             topic, partition, key, value, key_bytes, value_bytes
         )
     except Exception as exception:  # pylint: disable=W0703
@@ -170,26 +185,21 @@ async def _extract_send_partition(

 class AIOKafkaContextGetter(textmap.Getter["HeadersT"]):
     def get(self, carrier: HeadersT, key: str) -> list[str] | None:
-        if carrier is None:
-            return None
-
         for item_key, value in carrier:
             if item_key == key:
                 if value is not None:
                     return [value.decode()]
         return None

     def keys(self, carrier: HeadersT) -> list[str]:
-        if carrier is None:
-            return []
-        return [key for (key, value) in carrier]
+        return [key for (key, _) in carrier]


 class AIOKafkaContextSetter(textmap.Setter["HeadersT"]):
     def set(
         self, carrier: HeadersT, key: str | None, value: str | None
     ) -> None:
-        if carrier is None or key is None:
+        if key is None:
             return

         if not isinstance(carrier, MutableSequence):
@@ -215,7 +225,7 @@ def _enrich_base_span(
     client_id: str,
     topic: str,
     partition: int | None,
-    key: object | None,
+    key: str | None,
 ) -> None:
     span.set_attribute(
         messaging_attributes.MESSAGING_SYSTEM,
@@ -235,8 +245,7 @@ def _enrich_base_span(

     if key is not None:
         span.set_attribute(
-            messaging_attributes.MESSAGING_KAFKA_MESSAGE_KEY,
-            key,  # FIXME: serialize key to str?
+            messaging_attributes.MESSAGING_KAFKA_MESSAGE_KEY, key
         )


@@ -247,7 +256,7 @@ def _enrich_send_span(
     client_id: str,
     topic: str,
     partition: int | None,
-    key: object | None,
+    key: str | None,
 ) -> None:
     if not span.is_recording():
         return
@@ -276,7 +285,7 @@ def _enrich_getone_span(
     consumer_group: str | None,
     topic: str,
     partition: int | None,
-    key: object | None,
+    key: str | None,
     offset: int,
 ) -> None:
     if not span.is_recording():
@@ -399,8 +408,8 @@ def _get_span_name(operation: str, topic: str):
     return f"{topic} {operation}"


-def _wrap_send(
-    tracer: Tracer, async_produce_hook: ProduceHookT
+def _wrap_send(  # type: ignore[reportUnusedFunction]
+    tracer: Tracer, async_produce_hook: ProduceHookT | None
 ) -> Callable[..., Awaitable[asyncio.Future[RecordMetadata]]]:
     async def _traced_send(
         func: AIOKafkaSendProto,
@@ -417,7 +426,7 @@ async def _traced_send(
             topic = _extract_send_topic(args, kwargs)
             bootstrap_servers = _extract_bootstrap_servers(instance.client)
             client_id = _extract_client_id(instance.client)
-            key = _extract_send_key(args, kwargs)
+            key = _deserialize_key(_extract_send_key(args, kwargs))
             partition = await _extract_send_partition(instance, args, kwargs)
             span_name = _get_span_name("send", topic)
             with tracer.start_as_current_span(
@@ -449,7 +458,7 @@ async def _traced_send(

 async def _create_consumer_span(
     tracer: Tracer,
-    async_consume_hook: ConsumeHookT,
+    async_consume_hook: ConsumeHookT | None,
     record: aiokafka.ConsumerRecord[object, object],
     extracted_context: Context,
     bootstrap_servers: str | list[str],
@@ -473,7 +482,7 @@ async def _create_consumer_span(
         consumer_group=consumer_group,
         topic=record.topic,
         partition=record.partition,
-        key=record.key,
+        key=_deserialize_key(record.key),
         offset=record.offset,
     )
     try:
@@ -486,8 +495,8 @@ async def _create_consumer_span(
         return span


-def _wrap_getone(
-    tracer: Tracer, async_consume_hook: ConsumeHookT
+def _wrap_getone(  # type: ignore[reportUnusedFunction]
+    tracer: Tracer, async_consume_hook: ConsumeHookT | None
 ) -> Callable[..., Awaitable[aiokafka.ConsumerRecord[object, object]]]:
     async def _traced_getone(
         func: AIOKafkaGetOneProto,
@@ -521,8 +530,8 @@ async def _traced_getone(
     return _traced_getone


-def _wrap_getmany(
-    tracer: Tracer, async_consume_hook: ConsumeHookT
+def _wrap_getmany(  # type: ignore[reportUnusedFunction]
+    tracer: Tracer, async_consume_hook: ConsumeHookT | None
 ) -> Callable[
     ...,
     Awaitable[
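This is the core of the "fix key type" part of the commit: message keys are normalized to str via _deserialize_key before being recorded under MESSAGING_KAFKA_MESSAGE_KEY, instead of passing the raw object through. A quick illustration of the helper's behavior (it is a private utility, imported here only for demonstration):

from opentelemetry.instrumentation.aiokafka.utils import _deserialize_key

assert _deserialize_key(None) is None
assert _deserialize_key(b"order-42") == "order-42"        # UTF-8 bytes are decoded
assert _deserialize_key(b"\xff\xfe") == str(b"\xff\xfe")  # undecodable bytes fall back to str()
assert _deserialize_key(42) == "42"                       # any other object goes through str()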

instrumentation/opentelemetry-instrumentation-aiokafka/tests/test_utils.py

+1 -1

@@ -360,7 +360,7 @@ async def test_create_consumer_span(
         consumer_group=consumer_group,
         topic=record.topic,
         partition=record.partition,
-        key=record.key,
+        key=str(record.key),
         offset=record.offset,
     )
     consume_hook.assert_awaited_once_with(

pyproject.toml

+2

@@ -196,13 +196,15 @@ pythonVersion = "3.8"
 reportPrivateUsage = false # Ignore private attributes added by instrumentation packages.
 # Add progressively instrumentation packages here.
 include = [
+  "instrumentation/opentelemetry-instrumentation-aiokafka",
   "instrumentation/opentelemetry-instrumentation-asyncclick",
   "instrumentation/opentelemetry-instrumentation-threading",
   "instrumentation-genai/opentelemetry-instrumentation-vertexai",
 ]
 # We should also add type hints to the test suite - It helps on finding bugs.
 # We are excluding for now because it's easier, and more important to add to the instrumentation packages.
 exclude = [
+  "instrumentation/opentelemetry-instrumentation-aiokafka/tests/**/*.py",
   "instrumentation/opentelemetry-instrumentation-asyncclick/tests/**/*.py",
   "instrumentation/opentelemetry-instrumentation-threading/tests/**",
   "instrumentation-genai/opentelemetry-instrumentation-vertexai/tests/**/*.py",

tox.ini

+1

@@ -1047,6 +1047,7 @@ deps =
   {toxinidir}/util/opentelemetry-util-http
   {toxinidir}/instrumentation-genai/opentelemetry-instrumentation-vertexai[instruments]
   {toxinidir}/instrumentation-genai/opentelemetry-instrumentation-google-genai[instruments]
+  {toxinidir}/instrumentation/opentelemetry-instrumentation-aiokafka[instruments]
   {toxinidir}/instrumentation/opentelemetry-instrumentation-asyncclick[instruments]

 commands =

uv.lock

+2

Some generated files are not rendered by default.
