
Commit 300e2d3

Merge pull request #195 from holixon/feature/event_upcasters
feature: implement support for event upcasters, fix #193
2 parents 4ad69af + 51cfdb5 commit 300e2d3

6 files changed: +198 −43 lines

kafka-spring-boot-autoconfigure/src/main/java/org/axonframework/extensions/kafka/autoconfig/KafkaAutoConfiguration.java (+16 −8)

@@ -35,6 +35,7 @@
 import org.axonframework.extensions.kafka.eventhandling.producer.KafkaPublisher;
 import org.axonframework.extensions.kafka.eventhandling.producer.ProducerFactory;
 import org.axonframework.serialization.Serializer;
+import org.axonframework.serialization.upcasting.event.EventUpcasterChain;
 import org.axonframework.spring.config.AxonConfiguration;
 import org.axonframework.springboot.autoconfig.AxonAutoConfiguration;
 import org.axonframework.springboot.autoconfig.InfraConfiguration;
@@ -84,9 +85,14 @@ public KafkaAutoConfiguration(KafkaProperties properties) {
     @Bean
     @ConditionalOnMissingBean
     public KafkaMessageConverter<String, byte[]> kafkaMessageConverter(
-            @Qualifier("eventSerializer") Serializer eventSerializer
+            @Qualifier("eventSerializer") Serializer eventSerializer,
+            AxonConfiguration config
     ) {
-        return DefaultKafkaMessageConverter.builder().serializer(eventSerializer).build();
+        return DefaultKafkaMessageConverter
+                .builder()
+                .serializer(eventSerializer)
+                .upcasterChain(config.upcasterChain() != null ? config.upcasterChain() : new EventUpcasterChain())
+                .build();
     }
 
     @Bean("axonKafkaProducerFactory")
@@ -123,9 +129,10 @@ private boolean isNonEmptyString(String s) {
     @ConditionalOnMissingBean
     @Bean(destroyMethod = "shutDown")
     @ConditionalOnBean({ProducerFactory.class, KafkaMessageConverter.class})
-    public KafkaPublisher<String, byte[]> kafkaPublisher(ProducerFactory<String, byte[]> axonKafkaProducerFactory,
-                                                         KafkaMessageConverter<String, byte[]> kafkaMessageConverter,
-                                                         AxonConfiguration configuration) {
+    public KafkaPublisher<String, byte[]> kafkaPublisher(
+            ProducerFactory<String, byte[]> axonKafkaProducerFactory,
+            KafkaMessageConverter<String, byte[]> kafkaMessageConverter,
+            AxonConfiguration configuration) {
         return KafkaPublisher.<String, byte[]>builder()
                              .producerFactory(axonKafkaProducerFactory)
                              .messageConverter(kafkaMessageConverter)
@@ -138,9 +145,10 @@ public KafkaPublisher<String, byte[]> kafkaPublisher(ProducerFactory<String, byt
     @Bean
     @ConditionalOnMissingBean
     @ConditionalOnBean({KafkaPublisher.class})
-    public KafkaEventPublisher<String, byte[]> kafkaEventPublisher(KafkaPublisher<String, byte[]> kafkaPublisher,
-                                                                   KafkaProperties kafkaProperties,
-                                                                   EventProcessingConfigurer eventProcessingConfigurer) {
+    public KafkaEventPublisher<String, byte[]> kafkaEventPublisher(
+            KafkaPublisher<String, byte[]> kafkaPublisher,
+            KafkaProperties kafkaProperties,
+            EventProcessingConfigurer eventProcessingConfigurer) {
         KafkaEventPublisher<String, byte[]> kafkaEventPublisher =
                 KafkaEventPublisher.<String, byte[]>builder().kafkaPublisher(kafkaPublisher).build();
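
With this wiring in place, the autoconfigured kafkaMessageConverter uses whatever upcaster chain the application's AxonConfiguration exposes. Below is a minimal sketch of how an application could contribute such an upcaster, assuming a Jackson-based event serializer and assuming (as in recent axon-spring-boot-starter versions) that EventUpcaster beans in the context are collected into the configuration's upcaster chain; the class and event names are hypothetical.

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.node.ObjectNode;
    import org.axonframework.serialization.SimpleSerializedType;
    import org.axonframework.serialization.upcasting.event.IntermediateEventRepresentation;
    import org.axonframework.serialization.upcasting.event.SingleEventUpcaster;
    import org.springframework.stereotype.Component;

    // Hypothetical one-to-one upcaster: adds a "currency" field to an old OrderCreatedEvent payload.
    @Component
    public class OrderCreatedEventUpcaster extends SingleEventUpcaster {

        // Events written before the change carry no revision (null); we upcast them to revision "1.0".
        private static final SimpleSerializedType OLD_TYPE =
                new SimpleSerializedType("com.example.OrderCreatedEvent", null);

        @Override
        protected boolean canUpcast(IntermediateEventRepresentation intermediateRepresentation) {
            return intermediateRepresentation.getType().equals(OLD_TYPE);
        }

        @Override
        protected IntermediateEventRepresentation doUpcast(IntermediateEventRepresentation intermediateRepresentation) {
            return intermediateRepresentation.upcastPayload(
                    new SimpleSerializedType(OLD_TYPE.getName(), "1.0"),
                    JsonNode.class,
                    payload -> ((ObjectNode) payload).put("currency", "EUR")
            );
        }
    }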

kafka/src/main/java/org/axonframework/extensions/kafka/eventhandling/DefaultKafkaMessageConverter.java (+100 −26)

@@ -21,7 +21,9 @@
 import org.apache.kafka.common.header.Headers;
 import org.apache.kafka.common.header.internals.RecordHeader;
 import org.axonframework.common.AxonConfigurationException;
+import org.axonframework.eventhandling.EventData;
 import org.axonframework.eventhandling.EventMessage;
+import org.axonframework.eventhandling.GenericDomainEventEntry;
 import org.axonframework.eventhandling.GenericDomainEventMessage;
 import org.axonframework.eventhandling.GenericEventMessage;
 import org.axonframework.eventhandling.async.SequencingPolicy;
@@ -31,27 +33,34 @@
 import org.axonframework.serialization.SerializedMessage;
 import org.axonframework.serialization.SerializedObject;
 import org.axonframework.serialization.Serializer;
-import org.axonframework.serialization.SimpleSerializedObject;
+import org.axonframework.serialization.upcasting.event.EventUpcasterChain;
+import org.axonframework.serialization.upcasting.event.InitialEventRepresentation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.time.Instant;
 import java.util.Arrays;
 import java.util.Optional;
 import java.util.function.BiFunction;
+import java.util.stream.Stream;
 
 import static org.axonframework.common.BuilderUtils.assertNonNull;
 import static org.axonframework.extensions.kafka.eventhandling.HeaderUtils.*;
 import static org.axonframework.messaging.Headers.*;
 
 /**
- * Converts and {@link EventMessage} to a {@link ProducerRecord} Kafka message and {from a @link ConsumerRecord} Kafka
+ * Converts and {@link EventMessage} to a {@link ProducerRecord} Kafka message and from a {@link ConsumerRecord} Kafka
  * message back to an EventMessage (if possible).
  * <p>
  * During conversion meta data entries with the {@code 'axon-metadata-'} prefix are passed to the {@link Headers}. Other
  * message-specific attributes are added as metadata. The {@link EventMessage#getPayload()} is serialized using the
- * configured {@link Serializer} and passed as the Kafka recordd's body.
+ * configured {@link Serializer} and passed as the Kafka record's body.
  * <p>
+ * <p>
+ * If an up-caster / up-caster chain is configured, this converter will pass the converted messages through it.
+ * Please note, that since the message converter consumes records one-by-one, the up-casting functionality is
+ * limited to one-to-one and one-to-many up-casters only.
+ * </p>
  * This implementation will suffice in most cases.
  *
  * @author Nakul Mishra
@@ -65,6 +74,7 @@ public class DefaultKafkaMessageConverter implements KafkaMessageConverter<Strin
     private final Serializer serializer;
     private final SequencingPolicy<? super EventMessage<?>> sequencingPolicy;
     private final BiFunction<String, Object, RecordHeader> headerValueMapper;
+    private final EventUpcasterChain upcasterChain;
 
     /**
      * Instantiate a {@link DefaultKafkaMessageConverter} based on the fields contained in the {@link Builder}.
@@ -80,6 +90,7 @@ protected DefaultKafkaMessageConverter(Builder builder) {
         this.serializer = builder.serializer;
         this.sequencingPolicy = builder.sequencingPolicy;
         this.headerValueMapper = builder.headerValueMapper;
+        this.upcasterChain = builder.upcasterChain;
     }
 
     /**
@@ -131,43 +142,93 @@ public Optional<EventMessage<?>> readKafkaMessage(ConsumerRecord<String, byte[]>
             Headers headers = consumerRecord.headers();
             if (isAxonMessage(headers)) {
                 byte[] messageBody = consumerRecord.value();
-                SerializedMessage<?> message = extractSerializedMessage(headers, messageBody);
-                return buildMessage(headers, message);
+                final EventData<?> eventData = createEventData(headers, messageBody);
+                return upcasterChain
+                        .upcast(Stream.of(new InitialEventRepresentation(eventData, serializer)))
+                        .findFirst()
+                        .map(upcastedEventData -> new SerializedMessage<>(
+                                     upcastedEventData.getMessageIdentifier(),
+                                     new LazyDeserializingObject<>(upcastedEventData.getData(), serializer),
+                                     upcastedEventData.getMetaData()
+                             )
+                        ).flatMap(serializedMessage -> buildMessage(headers, serializedMessage));
             }
         } catch (Exception e) {
             logger.trace("Error converting ConsumerRecord [{}] to an EventMessage", consumerRecord, e);
         }
-
         return Optional.empty();
     }
 
-    private boolean isAxonMessage(Headers headers) {
-        return keys(headers).containsAll(Arrays.asList(MESSAGE_ID, MESSAGE_TYPE));
-    }
-
-    private SerializedMessage<?> extractSerializedMessage(Headers headers, byte[] messageBody) {
-        SimpleSerializedObject<byte[]> serializedObject = new SimpleSerializedObject<>(
-                messageBody,
-                byte[].class,
+    /**
+     * Constructs event data representation from given Kafka headers and byte array body.
+     * <p>
+     * This method <i>reuses</i> the {@link GenericDomainEventEntry} class for both types of events which can be
+     * transmitted via Kafka. For domain events, the fields <code>aggregateType</code>, <code>aggregateId</code> and
+     * <code>aggregateSeq</code> will contain the corresponding values, but for the simple event they will be
+     * <code>null</code>. This is ok to pass <code>null</code> to those values and <code>0L</code> to
+     * <code>aggregateSeq</code>, since the {@link InitialEventRepresentation} does the same in its constructor and
+     * is implemented in a null-tolerant way. Check {@link DefaultKafkaMessageConverter#isDomainEvent(Headers)} for more
+     * details.
+     * </p>
+     *
+     * @param headers     Kafka headers.
+     * @param messageBody Kafka payload as a byte array.
+     * @return event data.
+     */
+    private EventData<?> createEventData(Headers headers, byte[] messageBody) {
+        return new GenericDomainEventEntry<>(
+                valueAsString(headers, AGGREGATE_TYPE),
+                valueAsString(headers, AGGREGATE_ID),
+                valueAsLong(headers, AGGREGATE_SEQ, 0L),
+                valueAsString(headers, MESSAGE_ID),
+                valueAsLong(headers, MESSAGE_TIMESTAMP),
                 valueAsString(headers, MESSAGE_TYPE),
-                valueAsString(headers, MESSAGE_REVISION, null)
+                valueAsString(headers, MESSAGE_REVISION, null),
+                messageBody,
+                extractMetadataAsBytes(headers)
         );
+    }
 
-        return new SerializedMessage<>(
-                valueAsString(headers, MESSAGE_ID),
-                new LazyDeserializingObject<>(serializedObject, serializer),
-                new LazyDeserializingObject<>(MetaData.from(extractAxonMetadata(headers)))
-        );
+    private byte[] extractMetadataAsBytes(Headers headers) {
+        return serializer.serialize(MetaData.from(extractAxonMetadata(headers)), byte[].class).getData();
     }
 
-    private Optional<EventMessage<?>> buildMessage(Headers headers, SerializedMessage<?> message) {
+    private static boolean isAxonMessage(Headers headers) {
+        return keys(headers).containsAll(Arrays.asList(MESSAGE_ID, MESSAGE_TYPE));
+    }
+
+    /**
+     * Checks if the event is originated from an aggregate (domain event) or is a simple event sent over the bus.
+     * <p>
+     * The difference between a DomainEventMessage and an EventMessage, is the following three fields:
+     * <ul>
+     * <li>The type - represents the Aggregate the event originates from. It would be empty for an EventMessage and
+     * filled for a DomainEventMessage.</li>
+     * <li>The aggregateIdentifier - represents the Aggregate instance the event originates from. It would be equal
+     * to the eventIdentifier for an EventMessage and not equal to that identifier a DomainEventMessage.</li>
+     * <li>The sequenceNumber - represents the order of the events within an Aggregate instance's event stream.
+     * It would be 0 at all times for an EventMessage, whereas a DomainEventMessage would be 0 or greater.</li>
+     * </ul>
+     * </p>
+     *
+     * @param headers Kafka headers.
+     * @return <code>true</code> if the event is originated from an aggregate.
+     */
+    private static boolean isDomainEvent(Headers headers) {
+        return headers.lastHeader(AGGREGATE_TYPE) != null
+                && headers.lastHeader(AGGREGATE_ID) != null
+                && headers.lastHeader(AGGREGATE_SEQ) != null;
+    }
+
+    private static Optional<EventMessage<?>> buildMessage(Headers headers, SerializedMessage<?> message) {
         long timestamp = valueAsLong(headers, MESSAGE_TIMESTAMP);
-        return headers.lastHeader(AGGREGATE_ID) != null
-                ? buildDomainEvent(headers, message, timestamp)
-                : buildEvent(message, timestamp);
+        return isDomainEvent(headers)
+                ? buildDomainEventMessage(headers, message, timestamp)
+                : buildEventMessage(message, timestamp);
     }
 
-    private Optional<EventMessage<?>> buildDomainEvent(Headers headers, SerializedMessage<?> message, long timestamp) {
+    private static Optional<EventMessage<?>> buildDomainEventMessage(Headers headers, SerializedMessage<?> message,
+                                                                     long timestamp) {
         return Optional.of(new GenericDomainEventMessage<>(
                 valueAsString(headers, AGGREGATE_TYPE),
                 valueAsString(headers, AGGREGATE_ID),
@@ -177,7 +238,7 @@ private Optional<EventMessage<?>> buildDomainEvent(Headers headers, SerializedMe
         ));
     }
 
-    private Optional<EventMessage<?>> buildEvent(SerializedMessage<?> message, long timestamp) {
+    private static Optional<EventMessage<?>> buildEventMessage(SerializedMessage<?> message, long timestamp) {
         return Optional.of(new GenericEventMessage<>(message, () -> Instant.ofEpochMilli(timestamp)));
     }
 
@@ -193,6 +254,7 @@ public static class Builder {
         private Serializer serializer;
         private SequencingPolicy<? super EventMessage<?>> sequencingPolicy = SequentialPerAggregatePolicy.instance();
         private BiFunction<String, Object, RecordHeader> headerValueMapper = byteMapper();
+        private EventUpcasterChain upcasterChain = new EventUpcasterChain();
 
         /**
          * Sets the serializer to serialize the Event Message's payload with.
@@ -234,6 +296,18 @@ public Builder headerValueMapper(BiFunction<String, Object, RecordHeader> header
             return this;
         }
 
+        /**
+         * Sets the {@code upcasterChain} to be used during the consumption of events.
+         *
+         * @param upcasterChain upcaster chain to be used on event reading.
+         * @return the current Builder instance, for fluent interfacing
+         */
+        public Builder upcasterChain(EventUpcasterChain upcasterChain) {
+            assertNonNull(upcasterChain, "UpcasterChain must not be null");
+            this.upcasterChain = upcasterChain;
+            return this;
+        }
+
         /**
          * Initializes a {@link DefaultKafkaMessageConverter} as specified through this Builder.
         *
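
For non-Spring setups, the new builder property can be set directly. A minimal sketch of wiring an EventUpcasterChain into the converter by hand, mirroring what the autoconfiguration above does; the serializer choice and the OrderCreatedEventUpcaster instance are placeholders, not part of this commit.

    import org.axonframework.extensions.kafka.eventhandling.DefaultKafkaMessageConverter;
    import org.axonframework.extensions.kafka.eventhandling.KafkaMessageConverter;
    import org.axonframework.serialization.json.JacksonSerializer;
    import org.axonframework.serialization.upcasting.event.EventUpcasterChain;

    public class ConverterFactory {

        // Builds a converter whose readKafkaMessage(...) passes every consumed record
        // through the given upcasters before turning it into an EventMessage.
        public static KafkaMessageConverter<String, byte[]> upcastingConverter() {
            return DefaultKafkaMessageConverter.builder()
                                               .serializer(JacksonSerializer.defaultSerializer())
                                               .upcasterChain(new EventUpcasterChain(
                                                       new OrderCreatedEventUpcaster() // hypothetical upcaster from the sketch above
                                               ))
                                               .build();
        }
    }

As the new class-level javadoc notes, records are consumed one at a time, so only one-to-one and one-to-many upcasters apply on this reading path.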

kafka/src/main/java/org/axonframework/extensions/kafka/eventhandling/HeaderUtils.java (+14 −0)

@@ -77,6 +77,20 @@ public static Long valueAsLong(Headers headers, String key) {
         return asLong(value(headers, key));
     }
 
+    /**
+     * Return a {@link Long} representation of the {@code value} stored under a given {@code key} inside the {@link
+     * Headers}. In case of a missing entry {@code defaultValue} is returned.
+     *
+     * @param headers      the Kafka {@code headers} to pull the {@link Long} value from
+     * @param key          the key corresponding to the expected {@link Long} value
+     * @param defaultValue the default value to return when {@code key} does not exist in the given {@code headers}
+     * @return the value as a {@link Long} corresponding to the given {@code key} in the {@code headers}
+     */
+    public static Long valueAsLong(Headers headers, String key, Long defaultValue) {
+        Long value = asLong(value(headers, key));
+        return value != null ? value : defaultValue;
+    }
+
     /**
      * Converts bytes to {@link String}.
     *
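
A small usage sketch of the new overload, assuming (as the implementation above implies) that the existing value/asLong helpers return null for an absent header; createEventData relies on exactly this to default the aggregate sequence to 0L for non-domain events.

    import org.apache.kafka.common.header.internals.RecordHeaders;
    import org.axonframework.extensions.kafka.eventhandling.HeaderUtils;
    import org.axonframework.messaging.Headers;

    public class ValueAsLongExample {

        public static void main(String[] args) {
            RecordHeaders headers = new RecordHeaders(); // no Axon headers present
            // Absent key: the supplied default (0L) is returned instead of null.
            Long seq = HeaderUtils.valueAsLong(headers, Headers.AGGREGATE_SEQ, 0L);
            System.out.println(seq); // prints 0
        }
    }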
