Fix checkstyles
olegz committed Oct 21, 2024
1 parent fa9a80d commit 9888a08
Showing 24 changed files with 379 additions and 36 deletions.
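
Every hunk in this commit applies the same fix: a bare String.toUpperCase() or toLowerCase() call, which uses the JVM's default locale, is replaced by the Locale.ROOT overload, and java.util.Locale is imported where needed. The standalone sketch below (illustrative only, not part of the commit; class and variable names are invented) shows the default-locale pitfall such a checkstyle rule guards against: under a Turkish default locale the dotted "i" upper-cases to 'İ' (U+0130) rather than 'I'.

import java.util.Locale;

public class LocaleCaseExample {

    public static void main(String[] args) {
        // Force a Turkish default locale so the pitfall is reproducible anywhere.
        Locale.setDefault(Locale.forLanguageTag("tr-TR"));

        String controlFlag = "required";

        // Default-locale mapping prints "REQUİRED" (dotted capital İ);
        // a ControlFlag.valueOf() lookup on that string would throw IllegalArgumentException.
        System.out.println(controlFlag.toUpperCase());

        // Locale.ROOT mapping is locale-neutral: prints "REQUIRED".
        System.out.println(controlFlag.toUpperCase(Locale.ROOT));
    }
}
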
@@ -17,6 +17,7 @@
package org.springframework.cloud.stream.binder.kafka.properties;

import java.util.HashMap;
+ import java.util.Locale;
import java.util.Map;

import javax.security.auth.login.AppConfigurationEntry;

@@ -55,7 +56,7 @@ public KafkaJaasLoginModuleInitializer.ControlFlag getControlFlag() {
public void setControlFlag(String controlFlag) {
Assert.notNull(controlFlag, "cannot be null");
this.controlFlag = KafkaJaasLoginModuleInitializer.ControlFlag
- .valueOf(controlFlag.toUpperCase());
+ .valueOf(controlFlag.toUpperCase(Locale.ROOT));
}

public Map<String, String> getOptions() {

@@ -20,6 +20,7 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+ import java.util.Locale;
import java.util.Map;
import java.util.function.Function;

@@ -193,7 +194,7 @@ Function<Flux<String>, Flux<String>> uppercase() {

@Bean
Function<Flux<ReceiverRecord<byte[], byte[]>>, Flux<String>> lowercase() {
- return s -> s.map(rec -> new String(rec.value()).toLowerCase());
+ return s -> s.map(rec -> new String(rec.value()).toLowerCase(Locale.ROOT));
}

@Bean

@@ -0,0 +1,161 @@
/*
* Copyright 2022-2024 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.cloud.stream.binder.reactorkafka;

import java.lang.reflect.Type;
import java.time.Duration;
import java.util.Locale;
import java.util.function.Function;
import java.util.stream.Collectors;

import brave.handler.SpanHandler;
import brave.test.TestSpanHandler;
import io.micrometer.observation.Observation;
import io.micrometer.observation.ObservationRegistry;
import io.micrometer.observation.contextpropagation.ObservationThreadLocalAccessor;
import io.micrometer.tracing.brave.bridge.BraveFinishedSpan;
import io.micrometer.tracing.test.simple.SpansAssert;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.junit.jupiter.api.Test;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.kafka.receiver.ReceiverRecord;
import reactor.kafka.receiver.observation.KafkaReceiverObservation;
import reactor.kafka.receiver.observation.KafkaRecordReceiverContext;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.autoconfigure.actuate.observability.AutoConfigureObservability;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.stream.function.StreamBridge;
import org.springframework.context.annotation.Bean;
import org.springframework.integration.IntegrationMessageHeaderAccessor;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.converter.MessagingMessageConverter;
import org.springframework.kafka.support.converter.RecordMessageConverter;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.messaging.Message;
import org.springframework.test.annotation.DirtiesContext;

import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;

/**
* @author Artem Bilan
* @author Soby Chacko
* @since 4.2.0
*/
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE, properties = {
"spring.kafka.consumer.metadata.max.age.ms=1000",
"spring.cloud.function.definition=receive",
"spring.cloud.stream.function.reactive.uppercase=true",
"spring.cloud.stream.bindings.receive-in-0.group=rkbot-in-group",
"spring.cloud.stream.bindings.receive-in-0.destination=rkbot-in-topic",
"spring.cloud.stream.bindings.receive-out-0.destination=rkbot-out-topic",
"spring.cloud.stream.kafka.binder.enable-observation=true",
"spring.cloud.stream.kafka.binder.brokers=${spring.kafka.bootstrap-servers}",
"management.tracing.sampling.probability=1",
"spring.cloud.stream.kafka.bindings.receive-in-0.consumer.converterBeanName=fullRR"
})
@DirtiesContext
@AutoConfigureObservability
@EmbeddedKafka(topics = { "rkbot-out-topic" })
public class ReactorKafkaBinderObservationTests {

private static final TestSpanHandler SPANS = new TestSpanHandler();

@Autowired
StreamBridge streamBridge;

@Autowired
ObservationRegistry observationRegistry;

@Autowired
TestConfiguration testConfiguration;

@Autowired
private EmbeddedKafkaBroker embeddedKafka;

@Test
void endToEndReactorKafkaBinder1() {

streamBridge.send("rkbot-in-topic", MessageBuilder.withPayload("data")
.build());

await().timeout(Duration.ofSeconds(10)).untilAsserted(() -> assertThat(SPANS.spans()).hasSize(3));
SpansAssert.assertThat(SPANS.spans().stream().map(BraveFinishedSpan::fromBrave).collect(Collectors.toList()))
.haveSameTraceId();
}

@SpringBootConfiguration
@EnableAutoConfiguration(exclude = org.springframework.cloud.function.observability.ObservationAutoConfiguration.class)
public static class TestConfiguration {

@Bean
SpanHandler testSpanHandler() {
return SPANS;
}

@Bean
RecordMessageConverter fullRR() {
return new RecordMessageConverter() {

private final RecordMessageConverter converter = new MessagingMessageConverter();

@Override
public Message<?> toMessage(ConsumerRecord<?, ?> record, Acknowledgment acknowledgment,
org.apache.kafka.clients.consumer.Consumer<?, ?> consumer, Type payloadType) {

return MessageBuilder.withPayload(record).build();
}

@Override
public ProducerRecord<?, ?> fromMessage(Message<?> message, String defaultTopic) {
return this.converter.fromMessage(message, defaultTopic);
}

};
}

@Bean
Function<Flux<ReceiverRecord<byte[], byte[]>>, Flux<Message<String>>> receive(ObservationRegistry observationRegistry) {
return s -> s
.flatMap(record -> {
Observation receiverObservation =
KafkaReceiverObservation.RECEIVER_OBSERVATION.start(null,
KafkaReceiverObservation.DefaultKafkaReceiverObservationConvention.INSTANCE,
() ->
new KafkaRecordReceiverContext(
record, "user.receiver", "localhost:9092"),
observationRegistry);

return Mono.deferContextual(contextView -> Mono.just(record)
.map(rec -> new String(rec.value()).toLowerCase(Locale.ROOT))
.map(rec -> MessageBuilder.withPayload(rec).setHeader(IntegrationMessageHeaderAccessor.REACTOR_CONTEXT, contextView).build()))
.doOnTerminate(receiverObservation::stop)
.doOnError(receiverObservation::error)
.contextWrite(context -> context.put(ObservationThreadLocalAccessor.KEY, receiverObservation));
});
}
}

}

@@ -18,6 +18,7 @@

import java.util.Arrays;
import java.util.HashMap;
+ import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;

@@ -370,7 +371,7 @@ public Object onSuccess(ConfigurationPropertyName name, Bindable<?> target,
if (!concurrencyExplicitlyProvided[0]) {
concurrencyExplicitlyProvided[0] = name.getLastElement(ConfigurationPropertyName.Form.UNIFORM).equals("concurrency") &&
// name is normalized to contain only uniform elements and thus safe to call toLowerCase here.
- ConfigurationPropertyName.of("spring.cloud.stream.bindings." + inboundName.toLowerCase() + ".consumer")
+ ConfigurationPropertyName.of("spring.cloud.stream.bindings." + inboundName.toLowerCase(Locale.ROOT) + ".consumer")
.isAncestorOf(name);
}
return result;

@@ -19,6 +19,7 @@
import java.time.Duration;
import java.util.Arrays;
import java.util.Date;
+ import java.util.Locale;
import java.util.Map;
import java.util.function.Function;

@@ -190,7 +191,7 @@ public Function<KStream<Object, String>, KStream<?, WordCount>[]> process() {

return input -> {
final Map<String, KStream<Object, WordCount>> stringKStreamMap = input
- .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
+ .flatMapValues(value -> Arrays.asList(value.toLowerCase(Locale.ROOT).split("\\W+")))
.groupBy((key, value) -> value)
.windowedBy(TimeWindows.of(Duration.ofSeconds(5)))
.count(Materialized.as("WordCounts-branch"))

@@ -21,6 +21,7 @@
import java.util.Collection;
import java.util.Date;
import java.util.List;
+ import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;

@@ -397,7 +398,7 @@ public static class WordCountProcessorApplication {
Function<KStream<Object, String>, KStream<String, WordCount>> process() {

return input -> input
- .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
+ .flatMapValues(value -> Arrays.asList(value.toLowerCase(Locale.ROOT).split("\\W+")))
.map((key, value) -> new KeyValue<>(value, value))
.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
.windowedBy(TimeWindows.of(Duration.ofMillis(5000)))

@@ -439,7 +440,7 @@ static class OutboundNullApplication {
Function<KStream<Object, String>, KStream<?, WordCount>> process() {
return input -> input
.flatMapValues(
- value -> Arrays.asList(value.toLowerCase().split("\\W+")))
+ value -> Arrays.asList(value.toLowerCase(Locale.ROOT).split("\\W+")))
.map((key, value) -> new KeyValue<>(value, value))
.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
.windowedBy(TimeWindows.ofSizeWithNoGrace(Duration.ofSeconds(5))).count(Materialized.as("foobar-WordCounts"))

@@ -18,6 +18,7 @@

import java.time.Duration;
import java.util.Arrays;
+ import java.util.Locale;
import java.util.Map;
import java.util.function.Function;

@@ -124,7 +125,7 @@ public Function<KStream<Object, String>, KStream<?, String>> process() {

return input -> input
.flatMapValues(
- value -> Arrays.asList(value.toLowerCase().split("\\W+")))
+ value -> Arrays.asList(value.toLowerCase(Locale.ROOT).split("\\W+")))
.map((key, value) -> new KeyValue<>(value, value))
.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
.windowedBy(TimeWindows.of(Duration.ofSeconds(5))).count(Materialized.as("foo-WordCounts-x"))

@@ -19,6 +19,7 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
+ import java.util.Locale;
import java.util.Map;
import java.util.function.Function;

@@ -149,7 +150,7 @@ public Function<KStream<Object, String>, KStream<?, WordCount>> process() {

return input -> input
.flatMapValues(
- value -> Arrays.asList(value.toLowerCase().split("\\W+")))
+ value -> Arrays.asList(value.toLowerCase(Locale.ROOT).split("\\W+")))
.map((key, value) -> new KeyValue<>(value, value))
.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
.count(Materialized.as("WordCounts-tKWCWSIAP0")).toStream()

@@ -19,6 +19,7 @@
import java.time.Duration;
import java.util.Arrays;
import java.util.Date;
+ import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.function.Function;

@@ -173,7 +174,7 @@ static class WordCountProcessorApplication {
public Function<KStream<Object, String>, KStream<String, WordCount>> process() {

return input -> input
- .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
+ .flatMapValues(value -> Arrays.asList(value.toLowerCase(Locale.ROOT).split("\\W+")))
.map((key, value) -> new KeyValue<>(value, value))
.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
.windowedBy(TimeWindows.of(Duration.ofMillis(5000)))

@@ -19,6 +19,7 @@
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Arrays;
+ import java.util.Locale;
import java.util.Map;
import java.util.function.Function;

@@ -153,7 +154,7 @@ public Function<KStream<Object, String>, KStream<?, String>> process() {

return input -> input
.flatMapValues(
- value -> Arrays.asList(value.toLowerCase().split("\\W+")))
+ value -> Arrays.asList(value.toLowerCase(Locale.ROOT).split("\\W+")))
.map((key, value) -> new KeyValue<>(value, value))
.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
.windowedBy(TimeWindows.of(Duration.ofSeconds(5))).count(Materialized.as("foo-WordCounts-x"))

@@ -18,6 +18,7 @@

import java.time.Duration;
import java.util.Arrays;
+ import java.util.Locale;
import java.util.Map;
import java.util.function.Function;

@@ -137,7 +138,7 @@ public Function<KStream<Object, String>, KStream<?, String>> process() {

return input -> input
.flatMapValues(
- value -> Arrays.asList(value.toLowerCase().split("\\W+")))
+ value -> Arrays.asList(value.toLowerCase(Locale.ROOT).split("\\W+")))
.map((key, value) -> new KeyValue<>(value, value))
.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
.windowedBy(TimeWindows.of(Duration.ofSeconds(5))).count(Materialized.as("foo-WordCounts-x"))

@@ -26,6 +26,7 @@
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
+ import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

@@ -1517,7 +1518,7 @@ partition calculation (see other usages of PartitionHandler) by using current pa
if (producerProperties.isDynamicPartitionUpdatesEnabled() &&
producerProperties.getPartitionKeyExpression() != null &&
!(producerProperties.getPartitionKeyExpression().getExpressionString()
- .toLowerCase().contains("payload"))) {
+ .toLowerCase(Locale.ROOT).contains("payload"))) {
kafkaPartitionHandler =
new PartitionHandler(ExpressionUtils.createStandardEvaluationContext(beanFactory),
producerProperties, beanFactory);

@@ -16,6 +16,7 @@

package org.springframework.cloud.stream.binder.kafka.integration;

+ import java.util.Locale;
import java.util.Map;

import org.apache.kafka.clients.consumer.Consumer;

@@ -86,7 +87,7 @@ void producerTx() {
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
- props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_COMMITTED.name().toLowerCase());
+ props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_COMMITTED.name().toLowerCase(Locale.ROOT));
Consumer<?, ?> consumer = new KafkaConsumer<>(props);
embeddedKafkaBrokera.consumeFromAllEmbeddedTopics(consumer);
ConsumerRecord<?, ?> record = KafkaTestUtils.getSingleRecord(consumer, "output");

@@ -18,6 +18,7 @@

import java.util.ArrayList;
import java.util.List;
+ import java.util.Locale;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

@@ -128,7 +129,7 @@ public Function<String, String> listenIn() {
if (in.equals("two")) {
throw new RuntimeException("fail");
}
- return in.toUpperCase();
+ return in.toUpperCase(Locale.ROOT);
};
}
