Commit 8592c61
Kafka Message Header API (#120)

* Kafka Message Header API

  Motivation:

  Be able to attach headers to a `KafkaProducerMessage` and read out headers attached to a `KafkaConsumerMessage`.

  Modifications:

  * create new type `struct KafkaHeader` representing a key-value pair of `String` key and `ByteBuffer` value
  * add property `headers: [KafkaHeader]` to `KafkaProducerMessage` and `KafkaConsumerMessage`
  * use `rd_kafka_produceva` (variadic arguments) to produce messages, as `rd_kafka_produce` did not support setting message headers
  * create helper class `RDKafkaUnsafeProducerMessage` that helps configuring the variadic argument array for `rd_kafka_produceva`
  * add new test asserting that both producing and consuming messages with message headers works

* Remove KafkaContiguousBytes TODOs

* Review Franz

  Modifications:

  * no copying of `KafkaProducerMessage` headers and values -> build a scoped accessor helper that recursively accesses all underlying pointers of the `KafkaProducerMessage`'s `headers: [KafkaHeader]`
  * only use `rd_kafka_produceva` when `message.headers.isEmpty == false`

* Review Franz: simplify recursion cases
1 parent 1608c4a commit 8592c61
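
For orientation, here is a minimal sketch of how the API added in this commit could be used end to end. Only `KafkaHeader`, the `headers:` initializer parameter, and the `headers` property come from this diff; the `KafkaProducer.send(_:)` call, the `KafkaConsumer.messages` sequence, and the `String` conformance to `KafkaContiguousBytes` are assumed from the existing swift-kafka-client API, and the topic and header names are illustrative.

import Kafka
import NIOCore

func roundTripWithHeaders(producer: KafkaProducer, consumer: KafkaConsumer) async throws {
    // New in this commit: attach headers to an outgoing message via the `headers:` parameter.
    let message = KafkaProducerMessage(
        topic: "orders",
        headers: [
            KafkaHeader(key: "content-type", value: ByteBuffer(string: "application/json")),
            KafkaHeader(key: "trace-id", value: ByteBuffer(string: "abc-123")),
        ],
        key: "order-42",
        value: #"{"total": 19.99}"#
    )
    _ = try producer.send(message) // `send(_:)` is the pre-existing producer API (assumed here)

    // New in this commit: read the headers attached to a consumed message.
    for try await consumed in consumer.messages { // `messages` is the pre-existing consumer API (assumed here)
        for header in consumed.headers {
            let value = header.value.map { String(buffer: $0) } ?? "<no value>"
            print("\(header.key): \(value)")
        }
        break
    }
}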

6 files changed: +384 -35 lines changed

Sources/Kafka/Configuration/KafkaConfiguration+Security.swift (+1 -1)

@@ -173,7 +173,7 @@ extension KafkaConfiguration {

         /// Verify the identity of the broker.
         ///
-        /// Parameters:
+        /// - Parameters:
         ///     - trustRoots: File or directory path to CA certificate(s) for verifying the broker's key.
         ///     - certificateRevocationListPath: Path to CRL for verifying broker's certificate validity.
         public static func verify(

Sources/Kafka/KafkaConsumerMessage.swift (+83 -0)

@@ -21,6 +21,8 @@ public struct KafkaConsumerMessage {
     public var topic: String
     /// The partition that the message was received from.
     public var partition: KafkaPartition
+    /// The headers of the message.
+    public var headers: [KafkaHeader]
     /// The key of the message.
     public var key: ByteBuffer?
     /// The body of the message.
@@ -57,6 +59,8 @@ public struct KafkaConsumerMessage {

         self.partition = KafkaPartition(rawValue: Int(rdKafkaMessage.partition))

+        self.headers = try Self.getHeaders(for: messagePointer)
+
         if let keyPointer = rdKafkaMessage.key {
             let keyBufferPointer = UnsafeRawBufferPointer(
                 start: keyPointer,
@@ -80,3 +84,82 @@ extension KafkaConsumerMessage: Hashable {}
 // MARK: - KafkaConsumerMessage + Sendable

 extension KafkaConsumerMessage: Sendable {}
+
+// MARK: - Helpers
+
+extension KafkaConsumerMessage {
+    /// Extract ``KafkaHeader``s from a `rd_kafka_message_t` pointer.
+    ///
+    /// - Parameters:
+    ///     - for: Pointer to the `rd_kafka_message_t` object to extract the headers from.
+    private static func getHeaders(
+        for messagePointer: UnsafePointer<rd_kafka_message_t>
+    ) throws -> [KafkaHeader] {
+        var result: [KafkaHeader] = []
+        var headers: OpaquePointer?
+
+        var readStatus = rd_kafka_message_headers(messagePointer, &headers)
+
+        if readStatus == RD_KAFKA_RESP_ERR__NOENT {
+            // No Header Entries
+            return result
+        }
+
+        guard readStatus == RD_KAFKA_RESP_ERR_NO_ERROR else {
+            throw KafkaError.rdKafkaError(wrapping: readStatus)
+        }
+
+        guard let headers else {
+            return result
+        }
+
+        let headerCount = rd_kafka_header_cnt(headers)
+        result.reserveCapacity(headerCount)
+
+        var headerIndex = 0
+
+        while readStatus != RD_KAFKA_RESP_ERR__NOENT && headerIndex < headerCount {
+            var headerKeyPointer: UnsafePointer<CChar>?
+            var headerValuePointer: UnsafeRawPointer?
+            var headerValueSize = 0
+
+            readStatus = rd_kafka_header_get_all(
+                headers,
+                headerIndex,
+                &headerKeyPointer,
+                &headerValuePointer,
+                &headerValueSize
+            )
+
+            if readStatus == RD_KAFKA_RESP_ERR__NOENT {
+                // No Header Entries
+                return result
+            }
+
+            guard readStatus == RD_KAFKA_RESP_ERR_NO_ERROR else {
+                throw KafkaError.rdKafkaError(wrapping: readStatus)
+            }
+
+            guard let headerKeyPointer else {
+                fatalError("Found null pointer when reading KafkaConsumerMessage header key")
+            }
+            let headerKey = String(cString: headerKeyPointer)
+
+            var headerValue: ByteBuffer?
+            if let headerValuePointer, headerValueSize > 0 {
+                let headerValueBufferPointer = UnsafeRawBufferPointer(
+                    start: headerValuePointer,
+                    count: headerValueSize
+                )
+                headerValue = ByteBuffer(bytes: headerValueBufferPointer)
+            }
+
+            let newHeader = KafkaHeader(key: headerKey, value: headerValue)
+            result.append(newHeader)
+
+            headerIndex += 1
+        }
+
+        return result
+    }
+}
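
As a usage sketch (not part of the diff), the new `headers` property on `KafkaConsumerMessage` can be post-processed into a `[String: String]` lookup when the values are known to be UTF-8 text; the `headerDictionary(of:)` helper below is hypothetical, and `String(buffer:)` comes from NIOCore.

import Kafka
import NIOCore

/// Hypothetical helper: collapse a consumed message's headers into a dictionary,
/// keeping the last value for duplicate keys and dropping headers without a value.
func headerDictionary(of message: KafkaConsumerMessage) -> [String: String] {
    var result: [String: String] = [:]
    for header in message.headers {
        guard let value = header.value else { continue }
        result[header.key] = String(buffer: value)
    }
    return result
}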

Sources/Kafka/KafkaHeader.swift (+38 -0)

@@ -0,0 +1,38 @@
+//===----------------------------------------------------------------------===//
+//
+// This source file is part of the swift-kafka-client open source project
+//
+// Copyright (c) 2023 Apple Inc. and the swift-kafka-client project authors
+// Licensed under Apache License v2.0
+//
+// See LICENSE.txt for license information
+// See CONTRIBUTORS.txt for the list of swift-kafka-client project authors
+//
+// SPDX-License-Identifier: Apache-2.0
+//
+//===----------------------------------------------------------------------===//
+
+import NIOCore
+
+/// A structure representing a header for a Kafka message.
+/// Headers are key-value pairs that can be attached to Kafka messages to provide additional metadata.
+public struct KafkaHeader: Sendable, Hashable {
+    /// The key associated with the header.
+    public var key: String
+
+    /// The value associated with the header.
+    public var value: ByteBuffer?
+
+    /// Initializes a new Kafka header with the provided key and optional value.
+    ///
+    /// - Parameters:
+    ///     - key: The key associated with the header.
+    ///     - value: The optional binary value associated with the header.
+    public init(
+        key: String,
+        value: ByteBuffer? = nil
+    ) {
+        self.key = key
+        self.value = value
+    }
+}
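
Since `KafkaHeader` is `Hashable` and its value defaults to `nil`, headers can be constructed with or without a payload and compared directly, which is convenient in round-trip tests. A small sketch; the header names are illustrative only:

import Kafka
import NIOCore

// A header with a UTF-8 payload and a value-less marker header.
let contentType = KafkaHeader(key: "content-type", value: ByteBuffer(string: "text/plain"))
let marker = KafkaHeader(key: "is-tombstone") // `value` defaults to nil

// Hashable/Equatable make assertions straightforward, e.g. in a test:
// XCTAssertEqual(consumedMessage.headers, [contentType, marker])
assert(contentType != marker)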

Sources/Kafka/KafkaProducerMessage.swift (+12 -3)

@@ -25,6 +25,9 @@ public struct KafkaProducerMessage<Key: KafkaContiguousBytes, Value: KafkaContiguousBytes> {
     /// This means the message will be automatically assigned a partition using the topic's partitioner function.
     public var partition: KafkaPartition

+    /// The headers of the message.
+    public var headers: [KafkaHeader]
+
     /// The optional key associated with the message.
     /// If the ``KafkaPartition`` is ``KafkaPartition/unassigned``, the ``KafkaProducerMessage/key`` is used to ensure
     /// that two ``KafkaProducerMessage``s with the same key still get sent to the same ``KafkaPartition``.
@@ -38,18 +41,21 @@ public struct KafkaProducerMessage<Key: KafkaContiguousBytes, Value: KafkaContiguousBytes> {
     /// - Parameters:
     ///     - topic: The topic the message will be sent to. Topics may be created by the `KafkaProducer` if non-existent.
     ///     - partition: The topic partition the message will be sent to. If not set explicitly, the partition will be assigned automatically.
+    ///     - headers: The headers of the message.
     ///     - key: Used to guarantee that messages with the same key will be sent to the same partition so that their order is preserved.
     ///     - value: The message's value.
     public init(
         topic: String,
         partition: KafkaPartition = .unassigned,
+        headers: [KafkaHeader] = [],
         key: Key,
         value: Value
     ) {
         self.topic = topic
+        self.partition = partition
+        self.headers = headers
         self.key = key
         self.value = value
-        self.partition = partition
     }
 }

@@ -59,16 +65,19 @@ extension KafkaProducerMessage where Key == Never {
     /// - Parameters:
     ///     - topic: The topic the message will be sent to. Topics may be created by the `KafkaProducer` if non-existent.
     ///     - partition: The topic partition the message will be sent to. If not set explicitly, the partition will be assigned automatically.
+    ///     - headers: The headers of the message.
     ///     - value: The message body.
     public init(
         topic: String,
         partition: KafkaPartition = .unassigned,
+        headers: [KafkaHeader] = [],
         value: Value
     ) {
         self.topic = topic
-        self.value = value
-        self.key = nil
         self.partition = partition
+        self.headers = headers
+        self.key = nil
+        self.value = value
     }
 }
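
The key-less convenience initializer (for `Key == Never`) gains the same `headers:` parameter, so headers can be attached even when no partitioning key is needed. A brief sketch; the topic and header names are illustrative, and `String`'s `KafkaContiguousBytes` conformance from the package is assumed:

import Kafka
import NIOCore

// No key: the partition is chosen by the topic's partitioner, but headers still travel with the message.
let heartbeat = KafkaProducerMessage(
    topic: "service-heartbeats",
    headers: [KafkaHeader(key: "origin", value: ByteBuffer(string: "billing-service"))],
    value: "alive"
)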
