Skip to content

Commit

Permalink
Kafka producer nacking fix 5510 (helidon-io#5531)
Browse files Browse the repository at this point in the history
Signed-off-by: Daniel Kec <[email protected]>
  • Loading branch information
danielkec authored and trentjeff committed Nov 29, 2022
1 parent 10038f1 commit 74de234
Show file tree
Hide file tree
Showing 3 changed files with 42 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,8 @@ record = new ProducerRecord<>(topic, message.getPayload());
subscription.request(backpressure);
}
});
} else {
message.nack(exception);
}
});
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,14 @@
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Logger;

import jakarta.enterprise.context.ApplicationScoped;

import io.helidon.common.reactive.Multi;
import io.helidon.common.reactive.Single;

import org.apache.kafka.clients.consumer.ConsumerRecord;
Expand Down Expand Up @@ -305,4 +307,23 @@ public Message<String> channel13ToChannel12(KafkaMessage<Long, String> msg) {
return Message.of(msg.getPayload());
}
}

@ApplicationScoped
public static class Channel14 extends AbstractSampleBean {

    // Flipped to true when the outgoing message's ack callback runs.
    AtomicBoolean acked = new AtomicBoolean();
    // Completed with the nack cause when the outgoing message's nack callback runs.
    CompletableFuture<Throwable> nacked = new CompletableFuture<>();

    /**
     * Future completed with the throwable passed to the message's nack callback.
     *
     * @return future yielding the nack cause once the message is nacked
     */
    public CompletableFuture<Throwable> getNacked() {
        return nacked;
    }

    /**
     * Emit a single message whose ack/nack callbacks record the outcome,
     * so the test can observe whether the connector acked or nacked it.
     *
     * @return publisher of the single test message
     */
    @Outgoing("test-channel-14")
    public Multi<Message<String>> channel14() {
        Message<String> message = Message.of(
                "test",
                () -> CompletableFuture.completedStage(null).thenRun(() -> acked.set(true)),
                throwable -> CompletableFuture.<Void>completedFuture(null)
                        .thenAccept(unused -> nacked.complete(throwable)));
        return Multi.just(message);
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.logging.Level;
Expand All @@ -54,6 +55,7 @@
import org.apache.kafka.common.serialization.StringSerializer;
import org.eclipse.microprofile.config.spi.ConfigProviderResolver;
import org.eclipse.microprofile.reactive.messaging.spi.Connector;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
Expand Down Expand Up @@ -209,6 +211,13 @@ private static Map<String, String> cdiConfig() {
"mp.messaging.incoming.test-channel-13.group.id", "sameGroup",
"mp.messaging.incoming.test-channel-13.key.deserializer", LongDeserializer.class.getName(),
"mp.messaging.incoming.test-channel-13.value.deserializer", StringDeserializer.class.getName()));
p.putAll(Map.of(
"mp.messaging.outgoing.test-channel-14.connector", KafkaConnector.CONNECTOR_NAME,
"mp.messaging.outgoing.test-channel-14.max.block.ms", "100",
"mp.messaging.outgoing.test-channel-14.bootstrap.servers", KAFKA_SERVER,
"mp.messaging.outgoing.test-channel-14.topic", UNEXISTING_TOPIC,
"mp.messaging.outgoing.test-channel-14.key.serializer", LongSerializer.class.getName(),
"mp.messaging.outgoing.test-channel-14.value.serializer", StringSerializer.class.getName()));
return p;
}

Expand Down Expand Up @@ -253,6 +262,7 @@ private static void cdiContainerUp() {
classes.add(AbstractSampleBean.Channel9.class);
classes.add(AbstractSampleBean.Channel11.class);
classes.add(AbstractSampleBean.Channel12.class);
classes.add(AbstractSampleBean.Channel14.class);
classes.add(MessagingCdiExtension.class);

Map<String, String> p = new HashMap<>(cdiConfig());
Expand Down Expand Up @@ -398,6 +408,15 @@ void kafkaSubscriberConnectionError() throws InterruptedException {
kafkaResource.getKafkaTestUtils().consumeAllRecordsFromTopic(TEST_TOPIC_10);
}

/**
 * Producing to a non-existing topic with a very small {@code max.block.ms}
 * must nack the message; the nack cause is expected to wrap a Kafka
 * {@link org.apache.kafka.common.errors.TimeoutException}.
 */
@Test
void kafkaProduceWithNack() throws InterruptedException, ExecutionException, TimeoutException {
    LOGGER.fine(() -> "==========> test kafkaProduceWithNack()");
    AbstractSampleBean.Channel14 bean = cdiContainer.select(AbstractSampleBean.Channel14.class).get();
    // Wait for the connector to invoke the message's nack callback.
    Throwable cause = bean.getNacked().get(5, TimeUnit.SECONDS);
    assertNotNull(cause);
    assertThat(cause.getCause(), Matchers.instanceOf(org.apache.kafka.common.errors.TimeoutException.class));
}

@Test
void kafkaSubscriberSendError() throws InterruptedException {
LOGGER.fine(() -> "==========> test kafkaSubscriberSendError()");
Expand Down

0 comments on commit 74de234

Please sign in to comment.