
Commit

Optimize imports based on new checkstyle rules
sobychacko committed Oct 18, 2024
1 parent 99f1c45 · commit da2830d
Showing 203 changed files with 1,153 additions and 1,212 deletions.
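The reordering below follows a consistent pattern: in the Java files, static imports move from the top of each file's import list to the very end, and non-Spring third-party imports (com.fasterxml.*, io.micrometer.*, kafka.*, org.apache.*, reactor.*) are grouped into a single block ahead of the org.springframework.* imports; the Kotlin files are simply re-sorted. A minimal sketch of a Java file laid out under these rules follows; the package and class names are illustrative only and are not taken from the commit itself.

package com.example.checkstyle;

import java.time.Duration;
import java.util.List;

import io.micrometer.observation.ObservationRegistry;
import org.apache.kafka.clients.consumer.Consumer;
import reactor.core.publisher.Mono;

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.util.Assert;

import static org.assertj.core.api.Assertions.assertThat;

// Group order illustrated above:
//   1. java.* (and javax.*) imports
//   2. other third-party imports, alphabetical within the group
//   3. org.springframework.* imports
//   4. static imports last
final class ImportOrderExample {
    // Intentionally empty: only the import layout matters here.
}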
@@ -16,8 +16,6 @@

package com.example;

import static org.awaitility.Awaitility.await;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

@@ -28,6 +26,8 @@
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.test.annotation.DirtiesContext;

import static org.awaitility.Awaitility.await;

/**
* This test is going to fail from IDE since there is no exposed {@code spring.kafka.bootstrap-servers} system property.
* Use Maven to run tests which enables global embedded Kafka broker via properties provided to Surefire plugin.
@@ -16,8 +16,6 @@

package com.example;

import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

import java.util.concurrent.TimeUnit;

import org.apache.kafka.common.errors.TimeoutException;
@@ -29,6 +27,8 @@
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.test.annotation.DirtiesContext;

import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

/**
* This test is going to fail from IDE since there is no exposed {@code spring.kafka.bootstrap-servers} system property.
* This test demonstrates that global embedded Kafka broker config for {@code auto.create.topics.enable=false}
@@ -19,8 +19,6 @@
import java.time.Duration;
import java.util.List;

import static org.assertj.core.api.Assertions.assertThat;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
@@ -37,6 +35,8 @@
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.config.StreamsBuilderFactoryBean;

import static org.assertj.core.api.Assertions.assertThat;

/**
* @author Nacho Munoz
* @author Soby Chacko
@@ -15,27 +15,25 @@
*/
package org.springframework.kafka.kdocs.requestreply

import org.springframework.boot.autoconfigure.SpringBootApplication
import org.springframework.kafka.core.KafkaAdmin.NewTopics
import org.springframework.kafka.config.TopicBuilder
import org.springframework.kafka.core.KafkaTemplate
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate
import org.springframework.kafka.support.converter.ByteArrayJsonMessageConverter
import org.springframework.boot.ApplicationRunner
import org.springframework.boot.ApplicationArguments
import org.springframework.kafka.requestreply.RequestReplyTypedMessageFuture
import org.springframework.messaging.support.MessageBuilder
import org.springframework.core.ParameterizedTypeReference
import org.springframework.kafka.annotation.KafkaListener
import org.springframework.messaging.handler.annotation.SendTo
import kotlin.jvm.JvmStatic
import org.apache.kafka.common.serialization.ByteArrayDeserializer
import org.apache.kafka.common.serialization.ByteArraySerializer
import org.slf4j.LoggerFactory
import org.springframework.boot.ApplicationRunner
import org.springframework.boot.SpringApplication
import org.springframework.boot.autoconfigure.SpringBootApplication
import org.springframework.context.annotation.Bean
import org.springframework.core.ParameterizedTypeReference
import org.springframework.kafka.annotation.KafkaListener
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory
import org.springframework.kafka.config.TopicBuilder
import org.springframework.kafka.core.KafkaAdmin.NewTopics
import org.springframework.kafka.core.KafkaTemplate
import org.springframework.kafka.core.ProducerFactory
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate
import org.springframework.kafka.requestreply.RequestReplyTypedMessageFuture
import org.springframework.kafka.support.converter.ByteArrayJsonMessageConverter
import org.springframework.messaging.handler.annotation.SendTo
import org.springframework.messaging.support.MessageBuilder
import java.util.concurrent.TimeUnit
import java.util.function.Consumer

@@ -15,12 +15,11 @@
*/
package org.springframework.kafka.kdocs.started.consumer

import org.springframework.boot.autoconfigure.SpringBootApplication
import org.apache.kafka.clients.admin.NewTopic
import org.springframework.kafka.annotation.KafkaListener
import kotlin.jvm.JvmStatic
import org.springframework.boot.autoconfigure.SpringBootApplication
import org.springframework.boot.runApplication
import org.springframework.context.annotation.Bean
import org.springframework.kafka.annotation.KafkaListener
import org.springframework.kafka.kdocs.started.producer.Application

/**
@@ -15,11 +15,7 @@
*/
package org.springframework.kafka.kdocs.started.noboot

import org.springframework.boot.runApplication
import org.springframework.kafka.core.KafkaTemplate
import kotlin.jvm.JvmStatic
import org.springframework.context.annotation.AnnotationConfigApplicationContext
import org.springframework.kafka.kdocs.started.producer.Application

/**
* Code snippet for quick start.
@@ -19,14 +19,13 @@
import java.util.Map;
import java.util.Set;

import kafka.server.KafkaConfig;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.consumer.Consumer;

import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;

import kafka.server.KafkaConfig;

/**
* @author Gary Russell
* @since 3.1
@@ -42,6 +42,13 @@
import java.util.function.Function;
import java.util.stream.Collectors;

import kafka.cluster.EndPoint;
import kafka.server.KafkaConfig;
import kafka.server.KafkaServer;
import kafka.utils.CoreUtils;
import kafka.utils.TestUtils;
import kafka.zk.ZkFourLetterWords;
import kafka.zookeeper.ZooKeeperClient;
import org.apache.commons.logging.LogFactory;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
@@ -67,14 +74,6 @@
import org.springframework.retry.support.RetryTemplate;
import org.springframework.util.Assert;

import kafka.cluster.EndPoint;
import kafka.server.KafkaConfig;
import kafka.server.KafkaServer;
import kafka.utils.CoreUtils;
import kafka.utils.TestUtils;
import kafka.zk.ZkFourLetterWords;
import kafka.zookeeper.ZooKeeperClient;

/**
* An embedded Kafka Broker(s) and Zookeeper manager.
* This class is intended to be used in the unit tests.
@@ -16,10 +16,10 @@

package org.springframework.kafka.test.core;

import org.springframework.util.Assert;

import kafka.cluster.BrokerEndPoint;

import org.springframework.util.Assert;

/**
* Encapsulates the address of a Kafka broker.
*
@@ -33,6 +33,8 @@
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Function;

import io.micrometer.observation.Observation;
import io.micrometer.observation.ObservationRegistry;
import org.apache.commons.logging.LogFactory;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.consumer.Consumer;
@@ -82,9 +84,6 @@
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;

import io.micrometer.observation.Observation;
import io.micrometer.observation.ObservationRegistry;

/**
* A template for executing high-level operations. When used with a
* {@link DefaultKafkaProducerFactory}, the template is thread-safe. The producer factory
@@ -22,12 +22,11 @@
import java.util.List;
import java.util.Map;

import org.apache.kafka.clients.consumer.Consumer;

import io.micrometer.core.instrument.ImmutableTag;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import io.micrometer.core.instrument.binder.kafka.KafkaClientMetrics;
import org.apache.kafka.clients.consumer.Consumer;

/**
* A consumer factory listener that manages {@link KafkaClientMetrics}.
@@ -22,12 +22,11 @@
import java.util.List;
import java.util.Map;

import org.apache.kafka.clients.producer.Producer;

import io.micrometer.core.instrument.ImmutableTag;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import io.micrometer.core.instrument.binder.kafka.KafkaClientMetrics;
import org.apache.kafka.clients.producer.Producer;

/**
* A producer factory listener that manages {@link KafkaClientMetrics}.
@@ -30,9 +30,6 @@
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;

import org.springframework.util.Assert;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.kafka.receiver.KafkaReceiver;
@@ -42,6 +39,8 @@
import reactor.util.function.Tuple2;
import reactor.util.function.Tuples;

import org.springframework.util.Assert;

/**
* Reactive kafka consumer operations implementation.
*
@@ -26,14 +26,6 @@
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.PartitionInfo;
import org.reactivestreams.Publisher;

import org.springframework.beans.factory.DisposableBean;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.kafka.support.converter.MessagingMessageConverter;
import org.springframework.kafka.support.converter.RecordMessageConverter;
import org.springframework.messaging.Message;
import org.springframework.util.Assert;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.kafka.sender.KafkaSender;
@@ -44,6 +36,13 @@
import reactor.util.function.Tuple2;
import reactor.util.function.Tuples;

import org.springframework.beans.factory.DisposableBean;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.kafka.support.converter.MessagingMessageConverter;
import org.springframework.kafka.support.converter.RecordMessageConverter;
import org.springframework.messaging.Message;
import org.springframework.util.Assert;

/**
* Reactive kafka producer operations implementation.
*
@@ -48,6 +48,8 @@
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import io.micrometer.observation.Observation;
import io.micrometer.observation.ObservationRegistry;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.consumer.CommitFailedException;
import org.apache.kafka.clients.consumer.Consumer;
@@ -134,9 +136,6 @@
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

import io.micrometer.observation.Observation;
import io.micrometer.observation.ObservationRegistry;

/**
* Single-threaded Message listener container using the Java {@link Consumer} supporting
* auto-partition assignment or user-configured assignment.
@@ -21,15 +21,14 @@
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.TopicPartition;
import reactor.core.publisher.Mono;

import org.springframework.expression.ParserContext;
import org.springframework.expression.common.TemplateParserContext;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;

import reactor.core.publisher.Mono;

/**
* Utilities for listener adapters.
*
@@ -16,12 +16,12 @@

package org.springframework.kafka.listener.adapter;

import reactor.core.publisher.Mono;

import org.springframework.core.MethodParameter;
import org.springframework.messaging.Message;
import org.springframework.messaging.handler.invocation.HandlerMethodArgumentResolver;

import reactor.core.publisher.Mono;

/**
* No-op resolver for method arguments of type {@link kotlin.coroutines.Continuation}.
* <p>
@@ -35,6 +35,7 @@
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.TopicPartition;
import reactor.core.publisher.Mono;

import org.springframework.context.expression.MapAccessor;
import org.springframework.core.MethodParameter;
@@ -73,8 +74,6 @@
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

import reactor.core.publisher.Mono;

/**
* An abstract {@link org.springframework.kafka.listener.MessageListener} adapter
* providing the necessary infrastructure to extract the payload of a
@@ -22,14 +22,13 @@
import java.util.List;
import java.util.Map;

import org.apache.kafka.streams.KafkaStreams;

import org.springframework.kafka.config.StreamsBuilderFactoryBean;

import io.micrometer.core.instrument.ImmutableTag;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import io.micrometer.core.instrument.binder.kafka.KafkaStreamsMetrics;
import org.apache.kafka.streams.KafkaStreams;

import org.springframework.kafka.config.StreamsBuilderFactoryBean;

/**
* Creates a {@link KafkaStreamsMetrics} for the {@link KafkaStreams}.
@@ -27,6 +27,9 @@
import java.util.Map;
import java.util.Set;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeader;
@@ -35,10 +38,6 @@
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

/**
* Default header mapper for Apache Kafka.
* Most headers in {@link KafkaHeaders} are not mapped on outbound messages.
@@ -18,13 +18,13 @@

import java.io.IOException;

import org.springframework.util.MimeType;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;

import org.springframework.util.MimeType;

/**
* A {@link SimpleModule} extension for {@link MimeType} serialization.
*