Test Strimzi Test Container 0.101.0-rc2 #1637

Closed · wants to merge 16 commits
2 changes: 1 addition & 1 deletion pom.xml
@@ -106,7 +106,7 @@
        <nats-embedded.version>1.1.2</nats-embedded.version>
        <slf4j-log4j12.version>1.7.33</slf4j-log4j12.version>
        <opencsv.version>5.6</opencsv.version>
-       <strimzi-test-container.version>0.100.0</strimzi-test-container.version>
+       <strimzi-test-container.version>0.101.0</strimzi-test-container.version>
    </properties>

    <modules>
@@ -207,14 +207,14 @@ public synchronized void close() {
        Uni.createFrom().voidItem().invoke(() -> {
            LOGGER.infof("Closing consumer %s", clientId());
            if (kafkaConsumer != null) {
-               polling.compareAndSet(true, false);
                kafkaConsumer.close(kafkaApiTimeout);
                kafkaConsumer = null;
            }
            if (consumerExecutor != null) {
                consumerExecutor.shutdown();
                consumerExecutor = null;
            }
+           polling.compareAndSet(true, false);
        }).runSubscriptionOn(getOrCreateExecutor()).subscribeAsCompletionStage();
    }
}
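Note on the `polling` flag: the flip to `false` now happens unconditionally and last, after both the consumer and its executor are torn down. For context, `compareAndSet(true, false)` makes the stop signal idempotent; a minimal sketch of that gating idiom (the names are illustrative, not the actual fields of this class):

```java
import java.util.concurrent.atomic.AtomicBoolean;

public class StopOnceSketch {
    private final AtomicBoolean polling = new AtomicBoolean(true);

    void pollLoop() {
        while (polling.get()) {
            // consumer.poll(...) would run here; the loop exits on the
            // first check after the flag has been flipped to false
        }
    }

    void stop() {
        // flips true -> false exactly once; concurrent or repeated
        // stop() calls are no-ops after the first success
        if (polling.compareAndSet(true, false)) {
            // one-time cleanup would go here
        }
    }
}
```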
KafkaCompanion.java

@@ -8,9 +8,8 @@
import static org.apache.kafka.clients.consumer.ConsumerConfig.GROUP_ID_CONFIG;

import java.time.Duration;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.UUID;
+import java.util.*;
+import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.BooleanSupplier;
import java.util.function.Predicate;

@@ -43,6 +42,7 @@ public class KafkaCompanion implements AutoCloseable {
    private final String bootstrapServers;
    private final Duration kafkaApiTimeout;
    private AdminClient adminClient;
+    private final List<Runnable> onClose = new CopyOnWriteArrayList<>();

    public KafkaCompanion(String bootstrapServers) {
        this(bootstrapServers, Duration.ofSeconds(10));
@@ -73,15 +73,17 @@ public synchronized AdminClient getOrCreateAdminClient() {
        if (adminClient == null) {
            Map<String, Object> configMap = new HashMap<>(getCommonClientConfig());
            configMap.put(BOOTSTRAP_SERVERS_CONFIG, getBootstrapServers());
+            configMap.put(CLIENT_ID_CONFIG, "companion-admin-for-" + getBootstrapServers());
            adminClient = AdminClient.create(configMap);
+            registerOnClose(() -> adminClient.close(kafkaApiTimeout));
        }
        return adminClient;
    }

    @Override
    public synchronized void close() {
-        if (adminClient != null) {
-            adminClient.close(kafkaApiTimeout);
+        for (Runnable runnable : new ArrayList<>(onClose)) {
+            runnable.run();
        }
    }

@@ -195,13 +197,19 @@ public Map<String, Object> getConsumerProperties() {
    public <K, V> ConsumerBuilder<K, V> consumeWithDeserializers(
            Class<? extends Deserializer<?>> keyDeserType,
            Class<? extends Deserializer<?>> valueDeserType) {
-        return new ConsumerBuilder<>(getConsumerProperties(), kafkaApiTimeout, keyDeserType, valueDeserType);
+        ConsumerBuilder<K, V> builder = new ConsumerBuilder<>(getConsumerProperties(), kafkaApiTimeout, keyDeserType,
+                valueDeserType);
+        registerOnClose(builder::close);
+        return builder;
    }

    public <K, V> ConsumerBuilder<K, V> consumeWithDeserializers(
            Deserializer<K> keyDeserializer,
            Deserializer<V> valueDeserializer) {
-        return new ConsumerBuilder<>(getConsumerProperties(), kafkaApiTimeout, keyDeserializer, valueDeserializer);
+        ConsumerBuilder<K, V> builder = new ConsumerBuilder<>(getConsumerProperties(), kafkaApiTimeout, keyDeserializer,
+                valueDeserializer);
+        registerOnClose(builder::close);
+        return builder;
    }

    public <K, V> ConsumerBuilder<K, V> consume(Serde<K> keySerde, Serde<V> valueSerde) {
@@ -243,17 +251,25 @@ public Map<String, Object> getProducerProperties() {
    public <K, V> ProducerBuilder<K, V> produceWithSerializers(
            Class<? extends Serializer<?>> keySerializerType,
            Class<? extends Serializer<?>> valueSerializerType) {
-        return new ProducerBuilder<>(getProducerProperties(), kafkaApiTimeout, keySerializerType, valueSerializerType);
+        ProducerBuilder<K, V> builder = new ProducerBuilder<>(getProducerProperties(), kafkaApiTimeout, keySerializerType,
+                valueSerializerType);
+        registerOnClose(builder::close);
+        return builder;
    }

    public <K, V> ProducerBuilder<K, V> produceWithSerializers(
            Serializer<K> keySerializer,
            Serializer<V> valueSerializer) {
-        return new ProducerBuilder<>(getProducerProperties(), kafkaApiTimeout, keySerializer, valueSerializer);
+        ProducerBuilder<K, V> builder = new ProducerBuilder<>(getProducerProperties(), kafkaApiTimeout, keySerializer,
+                valueSerializer);
+        registerOnClose(builder::close);
+        return builder;
    }

    public <K, V> ProducerBuilder<K, V> produce(Serde<K> keySerde, Serde<V> valueSerde) {
-        return new ProducerBuilder<>(getProducerProperties(), kafkaApiTimeout, keySerde, valueSerde);
+        ProducerBuilder<K, V> builder = new ProducerBuilder<>(getProducerProperties(), kafkaApiTimeout, keySerde, valueSerde);
+        registerOnClose(builder::close);
+        return builder;
    }

    public <K, V> ProducerBuilder<K, V> produce(Class<K> keyType, Class<V> valueType) {
@@ -276,4 +292,13 @@ public ProducerBuilder<String, Double> produceDoubles() {
        return produce(Serdes.String(), Serdes.Double());
    }

+    private <K, V> void registerOnClose(Runnable action) {
+        onClose.add(() -> {
+            try {
+                action.run();
+            } catch (Exception ignored) {
+                // Ignored
+            }
+        });
+    }
}
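Taken together, every builder handed out by `KafkaCompanion` now registers a close hook, and `close()` runs them all, swallowing per-hook exceptions so one failing close cannot skip the rest. A hedged usage sketch — `produceStrings()`, `fromRecords(...)`, `awaitCompletion()` and the broker address/topic are assumptions based on the companion API, not shown in this diff:

```java
import org.apache.kafka.clients.producer.ProducerRecord;

import io.smallrye.reactive.messaging.kafka.companion.KafkaCompanion;

public class CompanionCloseExample {
    public static void main(String[] args) {
        // KafkaCompanion is AutoCloseable; close() now runs every registered
        // onClose hook (admin client, producers, consumers it created).
        try (KafkaCompanion companion = new KafkaCompanion("localhost:9092")) {
            companion.produceStrings()
                    .fromRecords(new ProducerRecord<>("orders", "key", "value"))
                    .awaitCompletion();
        } // the producer created above is closed here, along with the admin client
    }
}
```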
KafkaTask.java

@@ -20,7 +20,7 @@
 * @param <T> the type of items
 * @param <SELF> the reference to self type
 */
-public abstract class KafkaTask<T, SELF extends KafkaTask<T, SELF>> implements Iterable<T> {
+public abstract class KafkaTask<T, SELF extends KafkaTask<T, SELF>> implements Iterable<T>, AutoCloseable {

    /**
     * The {@link Multi} to subscribe
@@ -202,6 +202,11 @@ public SELF stop() {
        return self();
    }

+    @Override
+    public void close() {
+        stop();
+    }
+
    public long firstOffset() {
        T firstRecord = subscriber.getFirstRecord();
        if (firstRecord == null) {
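Since the concrete tasks extend `KafkaTask`, they can now sit in try-with-resources, with `close()` delegating to `stop()`. A hedged sketch — `ConsumerTask`, `consumeStrings()`, `fromTopics(...)`, `awaitCompletion()` and the topic name are assumptions from the companion API; `firstOffset()` appears in the hunk above:

```java
import io.smallrye.reactive.messaging.kafka.companion.ConsumerTask;
import io.smallrye.reactive.messaging.kafka.companion.KafkaCompanion;

public class TaskAutoCloseExample {
    static void consumeTen(KafkaCompanion companion) {
        // try-with-resources now works: close() delegates to stop(), so the
        // underlying subscription is cancelled even if an assertion throws.
        try (ConsumerTask<String, String> task = companion.consumeStrings().fromTopics("orders", 10)) {
            task.awaitCompletion();
            System.out.println("first offset: " + task.firstOffset());
        }
    }
}
```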
@@ -171,6 +171,7 @@ public synchronized void close() {
        if (kafkaProducer != null) {
            LOGGER.infof("Closing producer %s", clientId());
            // Kafka producer is thread-safe, we can call close on the caller thread
+            kafkaProducer.flush();
            kafkaProducer.close(kafkaApiTimeout);
            kafkaProducer = null;
            executorService.shutdown();
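The added `flush()` blocks until every buffered record is acknowledged or failed, so the subsequent `close(kafkaApiTimeout)` no longer risks dropping in-flight sends when the timeout elapses; presumably that is the motivation here. The flush-then-close idiom in isolation (broker address and topic are hypothetical):

```java
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class FlushThenClose {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("key.serializer", StringSerializer.class.getName());
        props.put("value.serializer", StringSerializer.class.getName());
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            producer.send(new ProducerRecord<>("demo", "key", "value"));
            // flush() waits for all buffered records to complete, so the
            // implicit close() below only has to release resources.
            producer.flush();
        }
    }
}
```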
@@ -469,11 +469,11 @@ private SELF request(long req) {

        @Override
        public void onSubscribe(Subscription s) {
-            subscription.set(s);
-            if (requested.get() > 0) {
-                s.request(requested.get());
+            if (subscription.compareAndSet(null, s)) {
+                if (requested.get() > 0) {
+                    s.request(requested.get());
+                }
            }

        }

        @Override
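The `compareAndSet(null, s)` guard keeps only the first `Subscription`, so a spurious second `onSubscribe` can no longer overwrite it and trigger a duplicate `request(...)`. Reactive Streams rule 2.5 goes one step further and cancels the late subscription; a hedged sketch of that stricter variant (not what this PR does, which silently ignores the duplicate):

```java
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;

import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;

abstract class StrictSubscriberSketch<T> implements Subscriber<T> {
    private final AtomicReference<Subscription> subscription = new AtomicReference<>();
    private final AtomicLong requested = new AtomicLong();

    @Override
    public void onSubscribe(Subscription s) {
        if (subscription.compareAndSet(null, s)) {
            long n = requested.get();
            if (n > 0) {
                s.request(n);
            }
        } else {
            // Rule 2.5: cancel any Subscription offered after the first one
            s.cancel();
        }
    }
}
```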
@@ -98,6 +98,7 @@ public Uni<TopicDescription> waitForTopic(String topic) {
                    }
                    return !checkIfTheTopicIsCreated(topic, topics);
                })
+                .select().where(Objects::nonNull)
                .toUni()
                .map(topics -> topics.get(topic));
    }

@@ -119,14 +120,6 @@ boolean checkIfTheTopicIsCreated(String topic, Map<String, TopicDescription> description) {
        }
        return true;
    }
-    //
-    //    boolean checkIfTheTopicIsCreated(String topic, Map<String, TopicDescription> description) {
-    //        return Optional.ofNullable(description)
-    //                .map(topics -> topics.get(topic))
-    //                .map(td -> td.partitions().stream()
-    //                        .allMatch(partition -> partition.leader() != null && partition.leader().id() >= 0))
-    //                .orElse(false);
-    //    }

    /**
     * @return the set of topic names
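The inserted `.select().where(Objects::nonNull)` drops null description maps before `.toUni()` latches onto the first item, so `waitForTopic` can no longer complete with a null map and fail in the final `.map(...)`. A minimal, self-contained Mutiny illustration of the select-then-first pattern (integers stand in for the description maps):

```java
import io.smallrye.mutiny.Multi;

public class SelectWhereExample {
    public static void main(String[] args) {
        // select().where(predicate) filters a Multi;
        // toUni() completes with the first item that passes.
        Integer firstEven = Multi.createFrom().items(1, 3, 4, 5, 6)
                .select().where(n -> n % 2 == 0)
                .toUni()
                .await().indefinitely();
        System.out.println(firstEven); // prints 4
    }
}
```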
KafkaBrokerExtension.java

@@ -7,22 +7,26 @@
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
+import java.lang.reflect.Method;
import java.time.Duration;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;

+import org.apache.kafka.clients.CommonClientConfigs;
+import org.apache.kafka.clients.admin.AdminClient;
+import org.apache.kafka.common.Node;
import org.jboss.logging.Logger;
-import org.junit.jupiter.api.extension.BeforeAllCallback;
-import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.api.extension.*;
import org.junit.jupiter.api.extension.ExtensionContext.Store.CloseableResource;
-import org.junit.jupiter.api.extension.ParameterContext;
-import org.junit.jupiter.api.extension.ParameterResolutionException;
-import org.junit.jupiter.api.extension.ParameterResolver;
+import org.testcontainers.shaded.org.awaitility.core.ConditionTimeoutException;

import io.strimzi.test.container.StrimziKafkaContainer;

/**
 * Junit extension for creating Strimzi Kafka broker
 */
-public class KafkaBrokerExtension implements BeforeAllCallback, ParameterResolver, CloseableResource {
+public class KafkaBrokerExtension implements BeforeAllCallback, BeforeEachCallback, ParameterResolver, CloseableResource {
    public static final Logger LOGGER = Logger.getLogger(KafkaBrokerExtension.class.getName());

    public static final String KAFKA_VERSION = "3.1.0";
@@ -53,6 +57,9 @@ public static StrimziKafkaContainer createKafkaContainer() {
    public static <T extends StrimziKafkaContainer> T configureKafkaContainer(T container) {
        String kafkaVersion = System.getProperty("kafka-container-version", KAFKA_VERSION);
        container.withKafkaVersion(kafkaVersion);
+        Map<String, String> config = new HashMap<>();
+        config.put("log.cleaner.enable", "false");
+        container.withKafkaConfigurationMap(config);
        return container;
    }
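`log.cleaner.enable=false` switches off the broker's log-cleaner (compaction) threads, which a short-lived single-broker test container does not need; presumably this trims resource usage. The resulting container setup in isolation (the version string is just this file's default):

```java
import java.util.HashMap;
import java.util.Map;

import io.strimzi.test.container.StrimziKafkaContainer;

public class ContainerSetupSketch {
    public static void main(String[] args) {
        StrimziKafkaContainer container = new StrimziKafkaContainer();
        container.withKafkaVersion("3.1.0");
        Map<String, String> config = new HashMap<>();
        config.put("log.cleaner.enable", "false"); // no compaction cleaner needed in tests
        container.withKafkaConfigurationMap(config);
        container.start();
        System.out.println(container.getBootstrapServers());
        container.stop();
    }
}
```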

@@ -131,6 +138,38 @@ public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) {
        return null;
    }

+    @Override
+    public void beforeEach(ExtensionContext context) throws Exception {
+        LOGGER.infof("Running test %s (%s#%s)", context.getDisplayName(),
+                context.getTestClass().map(Class::getName).orElse(""),
+                context.getTestMethod().map(Method::getName).orElse(""));
+        if (kafka != null) {
+            for (int i = 0; i < 3; i++) {
+                try {
+                    isBrokerHealthy();
+                    return;
+                } catch (ConditionTimeoutException e) {
+                    LOGGER.warn("The Kafka broker is not healthy, restarting it");
+                    restart(kafka, 0);
+                }
+            }
+            throw new IllegalStateException("The Kafka broker is not healthy, despite 3 restarts");
+        }
+    }
+
+    private void isBrokerHealthy() {
+        await().until(() -> kafka.isRunning());
+        await().catchUncaughtExceptions().until(() -> {
+            Map<String, Object> config = new HashMap<>();
+            config.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());
+            config.put(CommonClientConfigs.CLIENT_ID_CONFIG, "broker-healthy-admin");
+            try (AdminClient admin = AdminClient.create(config)) {
+                Collection<Node> nodes = admin.describeCluster().nodes().get();
+                return nodes.size() == 1 && nodes.iterator().next().id() >= 0;
+            }
+        });
+    }
+
    @Target({ ElementType.FIELD, ElementType.PARAMETER })
    @Retention(RetentionPolicy.RUNTIME)
    public @interface KafkaBootstrapServers {
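For reference, the extension is consumed as a standard JUnit 5 extension; the new `beforeEach` health check (with up to three restarts) runs before every test method. A hedged sketch of a test wired to it — the test class and method are made up; `@KafkaBootstrapServers` and the `ParameterResolver` come from the code above:

```java
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

@ExtendWith(KafkaBrokerExtension.class)
class BrokerSmokeTest {

    @Test
    void brokerIsReachable(@KafkaBrokerExtension.KafkaBootstrapServers String bootstrapServers) {
        // By the time this runs, beforeEach has verified the broker is healthy,
        // restarting it up to three times if the admin-client probe timed out.
        System.out.println("bootstrap servers: " + bootstrapServers);
    }
}
```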
KafkaToxiproxyExtension.java

@@ -5,19 +5,15 @@

import java.util.logging.Logger;

-import org.junit.jupiter.api.extension.BeforeAllCallback;
-import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.api.extension.*;
import org.junit.jupiter.api.extension.ExtensionContext.Store.CloseableResource;
-import org.junit.jupiter.api.extension.ParameterContext;
-import org.junit.jupiter.api.extension.ParameterResolutionException;
-import org.junit.jupiter.api.extension.ParameterResolver;
import org.testcontainers.containers.Network;

/**
 * Junit extension for creating Strimzi Kafka broker behind a Toxiproxy
 */
public class KafkaToxiproxyExtension extends KafkaBrokerExtension
-        implements BeforeAllCallback, ParameterResolver, CloseableResource {
+        implements BeforeAllCallback, BeforeEachCallback, ParameterResolver, CloseableResource {
    public static final Logger LOGGER = Logger.getLogger(KafkaToxiproxyExtension.class.getName());

    @Override
@@ -34,6 +30,12 @@ public void beforeAll(ExtensionContext context) {
        }
    }

+    @Override
+    public void beforeEach(ExtensionContext context) throws Exception {
+        // Do nothing for the Toxiproxy
+        // In this case we will not restart unhealthy brokers.
+    }
+
    @Override
    public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
            throws ParameterResolutionException {