Skip to content

Commit

Permalink
GH-500: Workaround for non-serializable header (#503)
Browse files Browse the repository at this point in the history
Fixes #500

When `listeners` are provided for `DefaultKafkaConsumerFactory`,
the target `KafkaConsumer` instance is proxied.
The `java.lang.reflect.Proxy` is `Serializable`,
but the value it is wrapping is not.
When the `MessageHeaders` is serialized (e.g. into persistent `MessageStore`),
it checks for the `Serializable` type only on the top-level object of the header.
Therefore, the `Proxy` passes the check, but we eventually fail
with a `NotSerializableException`, since the proxied object itself is not serializable.

* Remove `kafka_consumer` from a message before it reaches an aggregator
with its logic to serialize message into the store

This is a workaround until Spring for Apache Kafka is released
with the fix: spring-projects/spring-kafka#2822
  • Loading branch information
artembilan authored Sep 22, 2023
1 parent 7a30b39 commit 46dc4d0
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 3 deletions.
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright 2020-2022 the original author or authors.
* Copyright 2020-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
Expand Down Expand Up @@ -43,6 +43,7 @@
import org.springframework.integration.channel.FluxMessageChannel;
import org.springframework.integration.config.AggregatorFactoryBean;
import org.springframework.integration.store.MessageGroupStore;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.lang.Nullable;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageChannel;
Expand All @@ -67,7 +68,12 @@ public Function<Flux<Message<?>>, Flux<Message<?>>> aggregatorFunction(
FluxMessageChannel outputChannel
) {
return input -> Flux.from(outputChannel)
.doOnRequest((request) -> inputChannel.subscribeTo(input));
.doOnRequest((request) ->
inputChannel.subscribeTo(
input.map((inputMessage) ->
MessageBuilder.fromMessage(inputMessage)
.removeHeader("kafka_consumer")
.build())));
}

@Bean
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright 2020-2020 the original author or authors.
* Copyright 2020-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
Expand All @@ -16,13 +16,16 @@

package org.springframework.cloud.fn.aggregator;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.time.Duration;
import java.util.List;

import org.junit.jupiter.api.Test;
import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;

import org.springframework.aop.framework.ProxyFactory;
import org.springframework.cloud.fn.consumer.redis.RedisTestContainerSupport;
import org.springframework.integration.IntegrationMessageHeaderAccessor;
import org.springframework.integration.redis.store.RedisMessageStore;
Expand All @@ -46,11 +49,14 @@ static void redisProperties(DynamicPropertyRegistry registry) {

@Test
public void test() {
InputStream fakeNonSerializableKafkaConsumer = new ByteArrayInputStream(new byte[0]);

Flux<Message<?>> input =
Flux.just(MessageBuilder.withPayload("2")
.setHeader(IntegrationMessageHeaderAccessor.CORRELATION_ID, "my_correlation")
.setHeader(IntegrationMessageHeaderAccessor.SEQUENCE_NUMBER, 2)
.setHeader(IntegrationMessageHeaderAccessor.SEQUENCE_SIZE, 2)
.setHeader("kafka_consumer", new ProxyFactory(fakeNonSerializableKafkaConsumer).getProxy())
.build(),
MessageBuilder.withPayload("1")
.setHeader(IntegrationMessageHeaderAccessor.CORRELATION_ID, "my_correlation")
Expand Down

0 comments on commit 46dc4d0

Please sign in to comment.