diff --git a/.github/quarkus-github-bot.yml b/.github/quarkus-github-bot.yml index da85327059847..76b5432189cb6 100644 --- a/.github/quarkus-github-bot.yml +++ b/.github/quarkus-github-bot.yml @@ -2,6 +2,17 @@ # The format of this file is documented here: # https://github.com/quarkusio/quarkus-bot#triage-issues features: [ALL] +workflows: + rules: + - allow: + users: + minContributions: 10 + files: + - "**/*.md" + - "**/*.adoc" + unless: + files: + - ".github/**" workflowRunAnalysis: workflows: ["Quarkus CI"] projectsClassic: diff --git a/.github/workflows/ci-actions-incremental.yml b/.github/workflows/ci-actions-incremental.yml index 01f4d938f4420..f8f1b5253b0f2 100644 --- a/.github/workflows/ci-actions-incremental.yml +++ b/.github/workflows/ci-actions-incremental.yml @@ -331,7 +331,7 @@ jobs: java-version: ${{ matrix.java.java-version }} - name: Download Maven Repo - uses: actions/download-artifact@v1 + uses: actions/download-artifact@v3 with: name: maven-repo path: . @@ -413,7 +413,7 @@ jobs: run: git config --global core.longpaths true - uses: actions/checkout@v3 - name: Download Maven Repo - uses: actions/download-artifact@v1 + uses: actions/download-artifact@v3 with: name: maven-repo path: . @@ -488,7 +488,7 @@ jobs: run: git config --global core.longpaths true - uses: actions/checkout@v3 - name: Download Maven Repo - uses: actions/download-artifact@v1 + uses: actions/download-artifact@v3 with: name: maven-repo path: . @@ -554,7 +554,7 @@ jobs: run: git config --global core.longpaths true - uses: actions/checkout@v3 - name: Download Maven Repo - uses: actions/download-artifact@v1 + uses: actions/download-artifact@v3 with: name: maven-repo path: . @@ -610,7 +610,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Download Maven Repo - uses: actions/download-artifact@v1 + uses: actions/download-artifact@v3 with: name: maven-repo path: . 
@@ -661,7 +661,7 @@ jobs: distribution: temurin java-version: 11 - name: Download Maven Repo - uses: actions/download-artifact@v1 + uses: actions/download-artifact@v3 with: name: maven-repo path: . @@ -746,7 +746,7 @@ jobs: run: | cat <<< $(jq '.HttpHeaders += {"User-Agent": "Quarkus-CI-Docker-Client"}' ~/.docker/config.json) > ~/.docker/config.json - name: Download Maven Repo - uses: actions/download-artifact@v1 + uses: actions/download-artifact@v3 with: name: maven-repo path: . diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a4518c2984aeb..07e2bb691bc79 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -35,6 +35,7 @@ fixes, documentation, examples... But first, read this page (including the small - [Automatic incremental build](#automatic-incremental-build) * [Special case `bom-descriptor-json`](#special-case--bom-descriptor-json-) * [Usage by CI](#usage-by-ci) +* [Release your own version](#release) * [Documentation](#documentation) + [Building the documentation](#building-the-documentation) + [Referencing a new guide in the index](#referencing-a-new-guide-in-the-index) @@ -502,6 +503,33 @@ CI is using a slightly different GIB config than locally: For more details see the `Get GIB arguments` step in `.github/workflows/ci-actions-incremental.yml`. +## Release your own version + +You might want to release your own patched version of Quarkus to an internal repository. + +To do so, you will first need to update the version in the source code: + +```shell +./update-version.sh "x.y.z-yourcompany" +``` + +We use a shell script as we also need to update the version in various descriptors and test files. +The shell script calls `./mvnw versions:set` under the hood, among other things. 
+ +Commit the changes, then run: + +```shell +./mvnw --settings your-maven-settings.xml \ + clean deploy \ + -DskipTests -DskipITs \ + -DperformRelease=true \ + -Prelease \ + -Ddokka \ + -Dgpg.skip +``` + +If your Maven settings are in your global Maven settings file located in the `.m2/` directory, you can drop the `--settings your-maven-settings.xml` part. + ## Documentation The documentation is hosted in the [`docs` module](https://github.com/quarkusio/quarkus/tree/main/docs) of the main diff --git a/bom/application/pom.xml b/bom/application/pom.xml index 3af4a595da40b..e1b284c5a8587 100644 --- a/bom/application/pom.xml +++ b/bom/application/pom.xml @@ -28,7 +28,7 @@ 1.17.0-alpha 1.8.1 4.1.9 - 1.9.3 + 1.9.4 2.1.12 0.22.0 2.0.1 @@ -41,7 +41,7 @@ 1.0 1.13.1 2.12.0 - 3.2.1 + 3.3.0 3.0.5 2.2.1 1.7.1 @@ -53,7 +53,7 @@ 2.7.0 2.26.0 3.18.0 - 1.1.2 + 1.2.0 1.2.1 1.3.5 3.0.4 @@ -138,7 +138,7 @@ 1.0.3 3.5.0.Final 1.7.0 - 3.2.1 + 3.2.2 1.8.0 1.1.8.4 0.100.0 @@ -158,7 +158,7 @@ 1.0.10 9.3.0 1.0.11 - 4.15.0 + 4.16.0 1.32 6.0.0 4.7.1 diff --git a/build-parent/pom.xml b/build-parent/pom.xml index e2d6e9c1b05eb..2a9d7f36089c1 100644 --- a/build-parent/pom.xml +++ b/build-parent/pom.xml @@ -134,6 +134,8 @@ 4.6.1 + 0.9.15 + 1.9.1 6.1.2 3.6.1 5.62.2 @@ -309,6 +311,16 @@ bootstrap ${webjar.bootstrap.version} + + org.webjars + bootstrap-multiselect + ${webjar.bootstrap-multiselect.version} + + + org.webjars.npm + bootstrap-icons + ${webjar.bootstrap-icons.version} + org.webjars font-awesome diff --git a/core/deployment/src/main/java/io/quarkus/deployment/IsDockerWorking.java b/core/deployment/src/main/java/io/quarkus/deployment/IsDockerWorking.java index 861e77cb98b0f..61121cc405474 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/IsDockerWorking.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/IsDockerWorking.java @@ -1,11 +1,8 @@ package io.quarkus.deployment; -import java.io.BufferedReader; import java.io.File; import java.io.IOException; 
-import java.io.InputStream; -import java.io.InputStreamReader; import java.lang.reflect.InvocationTargetException; import java.net.InetSocketAddress; import java.net.Socket; @@ -15,7 +12,6 @@ import java.util.List; import java.util.Optional; import java.util.function.BooleanSupplier; -import java.util.function.Function; import java.util.function.Supplier; import org.eclipse.microprofile.config.ConfigProvider; @@ -176,29 +172,6 @@ public Result get() { } } - public static class OutputFilter implements Function { - private final StringBuilder builder = new StringBuilder(); - - @Override - public Runnable apply(InputStream is) { - return () -> { - - try (InputStreamReader isr = new InputStreamReader(is); - BufferedReader reader = new BufferedReader(isr)) { - - for (String line = reader.readLine(); line != null; line = reader.readLine()) { - builder.append(line); - } - } catch (IOException e) { - throw new RuntimeException("Error reading stream.", e); - } - }; - } - - public String getOutput() { - return builder.toString(); - } - } } private enum Result { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/OutputFilter.java b/core/deployment/src/main/java/io/quarkus/deployment/OutputFilter.java new file mode 100644 index 0000000000000..1d3af6e190939 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/OutputFilter.java @@ -0,0 +1,31 @@ +package io.quarkus.deployment; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.function.Function; + +public class OutputFilter implements Function { + private final StringBuilder builder = new StringBuilder(); + + @Override + public Runnable apply(InputStream is) { + return () -> { + + try (InputStreamReader isr = new InputStreamReader(is); + BufferedReader reader = new BufferedReader(isr)) { + + for (String line = reader.readLine(); line != null; line = reader.readLine()) { + builder.append(line); + } + } 
catch (IOException e) { + throw new RuntimeException("Error reading stream.", e); + } + }; + } + + public String getOutput() { + return builder.toString(); + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/logging/LoggingResourceProcessor.java b/core/deployment/src/main/java/io/quarkus/deployment/logging/LoggingResourceProcessor.java index 418af27f5bf1f..e1fa11ba794b9 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/logging/LoggingResourceProcessor.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/logging/LoggingResourceProcessor.java @@ -1,7 +1,9 @@ package io.quarkus.deployment.logging; +import java.lang.reflect.Modifier; import java.nio.file.Files; import java.util.ArrayList; +import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; @@ -13,6 +15,7 @@ import java.util.function.BiFunction; import java.util.function.Consumer; import java.util.function.Supplier; +import java.util.logging.Filter; import java.util.logging.Formatter; import java.util.logging.Handler; import java.util.logging.Level; @@ -29,9 +32,13 @@ import org.aesh.command.completer.OptionCompleter; import org.aesh.command.invocation.CommandInvocation; import org.aesh.command.option.Option; +import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.AnnotationTarget; +import org.jboss.jandex.ClassInfo; import org.jboss.jandex.CompositeIndex; import org.jboss.jandex.DotName; import org.jboss.jandex.IndexView; +import org.jboss.jandex.MethodInfo; import org.jboss.logging.Logger; import org.jboss.logmanager.EmbeddedConfigurator; import org.jboss.logmanager.LogManager; @@ -49,6 +56,7 @@ import io.quarkus.deployment.annotations.Produce; import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.ApplicationArchivesBuildItem; +import io.quarkus.deployment.builditem.CombinedIndexBuildItem; import io.quarkus.deployment.builditem.ConsoleCommandBuildItem; import 
io.quarkus.deployment.builditem.ConsoleFormatterBannerBuildItem; import io.quarkus.deployment.builditem.CuratedApplicationShutdownBuildItem; @@ -65,6 +73,7 @@ import io.quarkus.deployment.builditem.SystemPropertyBuildItem; import io.quarkus.deployment.builditem.WebSocketLogHandlerBuildItem; import io.quarkus.deployment.builditem.nativeimage.NativeImageSystemPropertyBuildItem; +import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.ServiceProviderBuildItem; import io.quarkus.deployment.console.ConsoleInstalledBuildItem; @@ -79,6 +88,7 @@ import io.quarkus.deployment.pkg.builditem.BuildSystemTargetBuildItem; import io.quarkus.deployment.pkg.steps.NativeOrNativeSourcesBuild; import io.quarkus.deployment.recording.RecorderContext; +import io.quarkus.deployment.util.JandexUtil; import io.quarkus.dev.console.CurrentAppExceptionHighlighter; import io.quarkus.dev.spi.DevModeType; import io.quarkus.gizmo.AnnotationCreator; @@ -91,11 +101,13 @@ import io.quarkus.gizmo.MethodCreator; import io.quarkus.gizmo.MethodDescriptor; import io.quarkus.gizmo.ResultHandle; +import io.quarkus.logging.LoggingFilter; import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.configuration.ConfigInstantiator; import io.quarkus.runtime.console.ConsoleRuntimeConfig; import io.quarkus.runtime.logging.CategoryBuildTimeConfig; import io.quarkus.runtime.logging.CleanupFilterConfig; +import io.quarkus.runtime.logging.DiscoveredLogComponents; import io.quarkus.runtime.logging.InheritableLevel; import io.quarkus.runtime.logging.LogBuildTimeConfig; import io.quarkus.runtime.logging.LogCleanupFilterElement; @@ -114,6 +126,13 @@ public final class LoggingResourceProcessor { "isMinLevelEnabled", boolean.class, int.class, String.class); + private static final DotName LOGGING_FILTER = DotName.createSimple(LoggingFilter.class.getName()); + 
private static final DotName FILTER = DotName.createSimple(Filter.class.getName()); + private static final String ILLEGAL_LOGGING_FILTER_USE_MESSAGE = "'@" + LoggingFilter.class.getName() + + "' can only be used on classes that implement '" + + Filter.class.getName() + "' and that are marked as final."; + private static final String[] EMPTY_STRING_ARRAY = new String[0]; + @BuildStep void setupLogFilters(BuildProducer filters) { filters.produce(new LogCleanupFilterBuildItem("org.jboss.threads", "JBoss Threads version")); @@ -204,6 +223,7 @@ void miscSetup( @Record(ExecutionTime.RUNTIME_INIT) LoggingSetupBuildItem setupLoggingRuntimeInit(RecorderContext context, LoggingSetupRecorder recorder, LogConfig log, LogBuildTimeConfig buildLog, + CombinedIndexBuildItem combinedIndexBuildItem, LogCategoryMinLevelDefaultsBuildItem categoryMinLevelDefaults, Optional logStreamHandlerBuildItem, List handlerBuildItems, @@ -214,7 +234,8 @@ LoggingSetupBuildItem setupLoggingRuntimeInit(RecorderContext context, LoggingSe List logStreamBuildItems, BuildProducer shutdownListenerBuildItemBuildProducer, LaunchModeBuildItem launchModeBuildItem, - List logCleanupFilters) { + List logCleanupFilters, + BuildProducer reflectiveClassBuildItemBuildProducer) { if (!launchModeBuildItem.isAuxiliaryApplication() || launchModeBuildItem.getAuxiliaryDevModeType().orElse(null) == DevModeType.TEST_ONLY) { final List>> handlers = handlerBuildItems.stream() @@ -245,13 +266,22 @@ LoggingSetupBuildItem setupLoggingRuntimeInit(RecorderContext context, LoggingSe .map(LogFileFormatBuildItem::getFormatterValue).collect(Collectors.toList()); context.registerSubstitution(InheritableLevel.ActualLevel.class, String.class, InheritableLevel.Substitution.class); context.registerSubstitution(InheritableLevel.Inherited.class, String.class, InheritableLevel.Substitution.class); + + DiscoveredLogComponents discoveredLogComponents = discoverLogComponents(combinedIndexBuildItem.getIndex()); + if 
(!discoveredLogComponents.getNameToFilterClass().isEmpty()) { + reflectiveClassBuildItemBuildProducer.produce(new ReflectiveClassBuildItem(true, false, false, + discoveredLogComponents.getNameToFilterClass().values().toArray( + EMPTY_STRING_ARRAY))); + } + shutdownListenerBuildItemBuildProducer.produce(new ShutdownListenerBuildItem( - recorder.initializeLogging(log, buildLog, categoryMinLevelDefaults.content, alwaysEnableLogStream, + recorder.initializeLogging(log, buildLog, discoveredLogComponents, + categoryMinLevelDefaults.content, alwaysEnableLogStream, devUiLogHandler, handlers, namedHandlers, consoleFormatItems.stream().map(LogConsoleFormatBuildItem::getFormatterValue) .collect(Collectors.toList()), possibleFileFormatters, - possibleSupplier, launchModeBuildItem.getLaunchMode()))); + possibleSupplier, launchModeBuildItem.getLaunchMode(), true))); LogConfig logConfig = new LogConfig(); ConfigInstantiator.handleObject(logConfig); for (LogCleanupFilterBuildItem i : logCleanupFilters) { @@ -276,6 +306,61 @@ public void run() { return new LoggingSetupBuildItem(); } + private DiscoveredLogComponents discoverLogComponents(IndexView index) { + Collection loggingFilterInstances = index.getAnnotations(LOGGING_FILTER); + DiscoveredLogComponents result = new DiscoveredLogComponents(); + + Map filtersMap = new HashMap<>(); + for (AnnotationInstance instance : loggingFilterInstances) { + AnnotationTarget target = instance.target(); + if (target.kind() != AnnotationTarget.Kind.CLASS) { + throw new IllegalStateException("Unimplemented mode of use of '" + LoggingFilter.class.getName() + "'"); + } + ClassInfo classInfo = target.asClass(); + if (!Modifier.isFinal(classInfo.flags())) { + throw new RuntimeException( + ILLEGAL_LOGGING_FILTER_USE_MESSAGE + " Offending class is '" + classInfo.name() + "'"); + } + boolean isFilterImpl = false; + ClassInfo currentClassInfo = classInfo; + while ((currentClassInfo != null) && (!JandexUtil.DOTNAME_OBJECT.equals(currentClassInfo.name()))) 
{ + boolean hasFilterInterface = false; + List ifaces = currentClassInfo.interfaceNames(); + for (DotName iface : ifaces) { + if (FILTER.equals(iface)) { + hasFilterInterface = true; + break; + } + } + if (hasFilterInterface) { + isFilterImpl = true; + break; + } + currentClassInfo = index.getClassByName(currentClassInfo.superName()); + } + if (!isFilterImpl) { + throw new RuntimeException( + ILLEGAL_LOGGING_FILTER_USE_MESSAGE + " Offending class is '" + classInfo.name() + "'"); + } + + MethodInfo ctor = classInfo.method(""); + if ((ctor == null) || (ctor.typeParameters().size() > 0)) { + throw new RuntimeException("Classes annotated with '" + LoggingFilter.class.getName() + + "' must have a no-args constructor. Offending class is '" + classInfo.name() + "'"); + } + String filterName = instance.value("name").asString(); + if (filtersMap.containsKey(filterName)) { + throw new RuntimeException("Filter '" + filterName + "' was defined multiple times."); + } + filtersMap.put(filterName, classInfo.name().toString()); + } + if (!filtersMap.isEmpty()) { + result.setNameToFilterClass(filtersMap); + } + + return result; + } + @BuildStep(onlyIfNot = IsNormal.class) @Produce(TestSetupBuildItem.class) @Produce(LogConsoleFormatBuildItem.class) diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildLocalContainerRunner.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildLocalContainerRunner.java index b63f2cb92886f..1be0521d44027 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildLocalContainerRunner.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildLocalContainerRunner.java @@ -2,37 +2,112 @@ import static io.quarkus.deployment.pkg.steps.LinuxIDUtil.getLinuxID; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.LinkOption; import java.nio.file.Path; +import java.time.Duration; 
import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.function.Predicate; +import java.util.stream.Collectors; import org.apache.commons.lang3.SystemUtils; +import org.jboss.logging.Logger; +import io.quarkus.deployment.OutputFilter; import io.quarkus.deployment.pkg.NativeConfig; +import io.quarkus.deployment.util.ExecUtil; import io.quarkus.deployment.util.FileUtil; import io.quarkus.runtime.util.ContainerRuntimeUtil; public class NativeImageBuildLocalContainerRunner extends NativeImageBuildContainerRunner { + private static final Logger LOGGER = Logger.getLogger(NativeImageBuildLocalContainerRunner.class.getName()); + public NativeImageBuildLocalContainerRunner(NativeConfig nativeConfig, Path outputDir) { super(nativeConfig, outputDir); if (SystemUtils.IS_OS_LINUX) { ArrayList containerRuntimeArgs = new ArrayList<>(Arrays.asList(baseContainerRuntimeArgs)); - String uid = getLinuxID("-ur"); - String gid = getLinuxID("-gr"); - if (uid != null && gid != null && !uid.isEmpty() && !gid.isEmpty()) { - Collections.addAll(containerRuntimeArgs, "--user", uid + ":" + gid); - if (containerRuntime == ContainerRuntimeUtil.ContainerRuntime.PODMAN) { - // Needed to avoid AccessDeniedExceptions - containerRuntimeArgs.add("--userns=keep-id"); + if (isDockerRootless(containerRuntime)) { + Collections.addAll(containerRuntimeArgs, "--user", String.valueOf(0)); + } else { + String uid = getLinuxID("-ur"); + String gid = getLinuxID("-gr"); + if (uid != null && gid != null && !uid.isEmpty() && !gid.isEmpty()) { + Collections.addAll(containerRuntimeArgs, "--user", uid + ":" + gid); + if (containerRuntime == ContainerRuntimeUtil.ContainerRuntime.PODMAN) { + // Needed to avoid AccessDeniedExceptions + containerRuntimeArgs.add("--userns=keep-id"); + } } } baseContainerRuntimeArgs = containerRuntimeArgs.toArray(baseContainerRuntimeArgs); } } + private static boolean 
isDockerRootless(ContainerRuntimeUtil.ContainerRuntime containerRuntime) { + if (containerRuntime != ContainerRuntimeUtil.ContainerRuntime.DOCKER) { + return false; + } + String dockerEndpoint = fetchDockerEndpoint(); + // docker socket? + String socketUriPrefix = "unix://"; + if (dockerEndpoint == null || !dockerEndpoint.startsWith(socketUriPrefix)) { + return false; + } + String dockerSocket = dockerEndpoint.substring(socketUriPrefix.length()); + String currentUid = getLinuxID("-ur"); + if (currentUid == null || currentUid.isEmpty() || currentUid.equals(String.valueOf(0))) { + return false; + } + + int socketOwnerUid; + try { + socketOwnerUid = (int) Files.getAttribute(Path.of(dockerSocket), "unix:uid", LinkOption.NOFOLLOW_LINKS); + } catch (IOException e) { + LOGGER.infof("Owner UID lookup on '%s' failed with '%s'", dockerSocket, e.getMessage()); + return false; + } + return currentUid.equals(String.valueOf(socketOwnerUid)); + } + + private static String fetchDockerEndpoint() { + // DOCKER_HOST environment variable overrides the active context + String dockerHost = System.getenv("DOCKER_HOST"); + if (dockerHost != null) { + return dockerHost; + } + + OutputFilter outputFilter = new OutputFilter(); + if (!ExecUtil.execWithTimeout(new File("."), outputFilter, Duration.ofMillis(3000), + "docker", "context", "ls", "--format", + "'{{- if .Current -}} {{- .DockerEndpoint -}} {{- end -}}'")) { + LOGGER.debug("Docker context lookup didn't succeed in time"); + return null; + } + + Set endpoints = outputFilter.getOutput().lines() + .filter(Objects::nonNull) + .filter(Predicate.not(String::isBlank)) + .collect(Collectors.toSet()); + if (endpoints.size() == 1) { + return endpoints.stream().findFirst().orElse(null); + } + if (LOGGER.isDebugEnabled()) { + LOGGER.debugf("Found too many active Docker endpoints: [%s]", + endpoints.stream() + .map(endpoint -> String.format("'%s'", endpoint)) + .collect(Collectors.joining(","))); + } + return null; + } + @Override protected List 
getContainerRuntimeBuildArgs() { List containerRuntimeArgs = super.getContainerRuntimeBuildArgs(); @@ -45,4 +120,5 @@ protected List getContainerRuntimeBuildArgs() { volumeOutputPath + ":" + NativeImageBuildStep.CONTAINER_BUILD_VOLUME_PATH + ":z"); return containerRuntimeArgs; } + } diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/Constants.java b/core/processor/src/main/java/io/quarkus/annotation/processor/Constants.java index 3f99646027c9b..60ad1ab17f006 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/Constants.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/Constants.java @@ -28,6 +28,8 @@ final public class Constants { public static final String DASH = "-"; public static final String ADOC_EXTENSION = ".adoc"; public static final String DIGIT_OR_LOWERCASE = "^[a-z0-9]+$"; + public static final String NEW_LINE = "\n"; + public static final String SECTION_TITLE_L1 = "= "; public static final String PARENT = "<>"; public static final String NO_DEFAULT = "<>"; diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/ExtensionAnnotationProcessor.java b/core/processor/src/main/java/io/quarkus/annotation/processor/ExtensionAnnotationProcessor.java index 779b222239201..4b0384fc1678c 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/ExtensionAnnotationProcessor.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/ExtensionAnnotationProcessor.java @@ -460,7 +460,7 @@ private void recordMappingJavadoc(final TypeElement clazz, final Properties java for (Element e : clazz.getEnclosedElements()) { switch (e.getKind()) { case INTERFACE: { - recordMappingJavadoc(((TypeElement) e), javadocProps); + recordMappingJavadoc(((TypeElement) e)); break; } diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDoItemFinder.java 
b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDoItemFinder.java index f7b9447c448d6..17a93029367ee 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDoItemFinder.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDoItemFinder.java @@ -19,6 +19,7 @@ import static io.quarkus.annotation.processor.generate_doc.DocGeneratorUtil.hyphenate; import static io.quarkus.annotation.processor.generate_doc.DocGeneratorUtil.hyphenateEnumValue; import static io.quarkus.annotation.processor.generate_doc.DocGeneratorUtil.stringifyType; +import static javax.lang.model.element.Modifier.ABSTRACT; import java.io.IOException; import java.time.Duration; @@ -86,11 +87,11 @@ public ConfigDoItemFinder(Set configRoots, * */ ScannedConfigDocsItemHolder findInMemoryConfigurationItems() throws IOException { - for (Map.Entry entry : configGroupQualifiedNameToTypeElementMap.entrySet()) { ConfigPhase buildTime = ConfigPhase.BUILD_TIME; - final List configDocItems = recursivelyFindConfigItems( - entry.getValue(), EMPTY, EMPTY, buildTime, false, false, 1, false); + final List configDocItems = recursivelyFindConfigItems(entry.getValue(), EMPTY, EMPTY, buildTime, + false, 1, + false); allConfigurationGroups.put(entry.getKey(), OBJECT_MAPPER.writeValueAsString(configDocItems)); } @@ -99,9 +100,8 @@ ScannedConfigDocsItemHolder findInMemoryConfigurationItems() throws IOException final TypeElement element = configRootInfo.getClazz(); String rootName = configRootInfo.getName(); ConfigPhase configPhase = configRootInfo.getConfigPhase(); - boolean isMapping = configRootInfo.isMapping(); final List configDocItems = recursivelyFindConfigItems(element, rootName, rootName, configPhase, - isMapping, false, sectionLevel, true); + false, sectionLevel, true); holder.addConfigRootItems(configRootInfo, configDocItems); allConfigurationRoots.put(configRootInfo.getClazz().toString(), 
OBJECT_MAPPER.writeValueAsString(configDocItems)); } @@ -113,8 +113,7 @@ ScannedConfigDocsItemHolder findInMemoryConfigurationItems() throws IOException * Recursively find config item found in a config root or config group given as {@link Element} */ private List recursivelyFindConfigItems(Element element, String rootName, String parentName, - ConfigPhase configPhase, boolean isMapping, boolean withinAMap, int sectionLevel, - boolean generateSeparateConfigGroupDocsFiles) + ConfigPhase configPhase, boolean withinAMap, int sectionLevel, boolean generateSeparateConfigGroupDocsFiles) throws JsonProcessingException { List configDocItems = new ArrayList<>(); TypeElement asTypeElement = (TypeElement) element; @@ -130,7 +129,7 @@ private List recursivelyFindConfigItems(Element element, String r if (rawConfigItems == null) { // element not yet scanned Element superElement = ((DeclaredType) superType).asElement(); superTypeConfigItems = recursivelyFindConfigItems(superElement, rootName, parentName, - configPhase, isMapping, withinAMap, sectionLevel, generateSeparateConfigGroupDocsFiles); + configPhase, withinAMap, sectionLevel, generateSeparateConfigGroupDocsFiles); } else { superTypeConfigItems = OBJECT_MAPPER.readValue(rawConfigItems, LIST_OF_CONFIG_ITEMS_TYPE_REF); } @@ -139,7 +138,9 @@ private List recursivelyFindConfigItems(Element element, String r } for (Element enclosedElement : element.getEnclosedElements()) { - if (!enclosedElement.getKind().isField() && (!isMapping || !enclosedElement.getKind().equals(ElementKind.METHOD))) { + shouldProcessElement(enclosedElement); + + if (!shouldProcessElement(enclosedElement)) { continue; } @@ -221,15 +222,13 @@ private List recursivelyFindConfigItems(Element element, String r } // Mappings - if (isMapping) { - for (Map.Entry entry : annotationMirror - .getElementValues().entrySet()) { - Object value = entry.getValue().getValue(); - if (annotationName.equals(ANNOTATION_CONFIG_WITH_NAME)) { - name = parentName + DOT + value; - } 
else if (annotationName.equals(ANNOTATION_CONFIG_WITH_DEFAULT)) { - defaultValue = value.toString(); - } + for (Map.Entry entry : annotationMirror + .getElementValues().entrySet()) { + Object value = entry.getValue().getValue(); + if (annotationName.equals(ANNOTATION_CONFIG_WITH_NAME)) { + name = parentName + DOT + value; + } else if (annotationName.equals(ANNOTATION_CONFIG_WITH_DEFAULT)) { + defaultValue = value.toString(); } } } @@ -254,7 +253,7 @@ private List recursivelyFindConfigItems(Element element, String r if (isConfigGroup(type)) { List groupConfigItems = readConfigGroupItems(configPhase, rootName, name, type, - configSection, isMapping, withinAMap, generateSeparateConfigGroupDocsFiles); + configSection, withinAMap, generateSeparateConfigGroupDocsFiles); DocGeneratorUtil.appendConfigItemsIntoExistingOnes(configDocItems, groupConfigItems); } else { final ConfigDocKey configDocKey = new ConfigDocKey(); @@ -278,7 +277,7 @@ private List recursivelyFindConfigItems(Element element, String r if (isConfigGroup(type)) { name += String.format(NAMED_MAP_CONFIG_ITEM_FORMAT, configDocMapKey); List groupConfigItems = readConfigGroupItems(configPhase, rootName, name, type, - configSection, isMapping, true, generateSeparateConfigGroupDocsFiles); + configSection, true, generateSeparateConfigGroupDocsFiles); DocGeneratorUtil.appendConfigItemsIntoExistingOnes(configDocItems, groupConfigItems); continue; } else { @@ -304,8 +303,7 @@ private List recursivelyFindConfigItems(Element element, String r } configSection.setOptional(true); List groupConfigItems = readConfigGroupItems(configPhase, rootName, name, - typeInString, configSection, isMapping, withinAMap, - generateSeparateConfigGroupDocsFiles); + typeInString, configSection, withinAMap, generateSeparateConfigGroupDocsFiles); DocGeneratorUtil.appendConfigItemsIntoExistingOnes(configDocItems, groupConfigItems); continue; } else if ((typeInString.startsWith(List.class.getName()) @@ -390,6 +388,20 @@ private boolean 
isConfigGroup(String type) { return configGroupQualifiedNameToTypeElementMap.containsKey(type) || allConfigurationGroups.hasKey(type); } + private boolean shouldProcessElement(final Element enclosedElement) { + if (enclosedElement.getKind().isField()) { + return true; + } + + // A ConfigMapping method + if (enclosedElement.getKind().equals(ElementKind.METHOD)) { + Element enclosingElement = enclosedElement.getEnclosingElement(); + return enclosingElement.getModifiers().contains(ABSTRACT) && enclosedElement.getModifiers().contains(ABSTRACT); + } + + return false; + } + private String simpleTypeToString(TypeMirror typeMirror) { if (typeMirror.getKind().isPrimitive()) { return typeMirror.toString(); @@ -456,8 +468,8 @@ private boolean isDurationType(TypeMirror realTypeMirror) { * */ private List readConfigGroupItems(ConfigPhase configPhase, String topLevelRootName, String parentName, - String configGroup, ConfigDocSection configSection, boolean isMapping, boolean withinAMap, - boolean generateSeparateConfigGroupDocs) throws JsonProcessingException { + String configGroup, ConfigDocSection configSection, boolean withinAMap, boolean generateSeparateConfigGroupDocs) + throws JsonProcessingException { configSection.setConfigGroupType(configGroup); if (configSection.getSectionDetailsTitle() == null) { @@ -475,7 +487,7 @@ private List readConfigGroupItems(ConfigPhase configPhase, String groupConfigItems = OBJECT_MAPPER.readValue(property, LIST_OF_CONFIG_ITEMS_TYPE_REF); } else { TypeElement configGroupTypeElement = configGroupQualifiedNameToTypeElementMap.get(configGroup); - groupConfigItems = recursivelyFindConfigItems(configGroupTypeElement, EMPTY, EMPTY, configPhase, isMapping, + groupConfigItems = recursivelyFindConfigItems(configGroupTypeElement, EMPTY, EMPTY, configPhase, false, 1, generateSeparateConfigGroupDocs); allConfigurationGroups.put(configGroup, OBJECT_MAPPER.writeValueAsString(groupConfigItems)); } diff --git 
a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDoc.java b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDoc.java new file mode 100644 index 0000000000000..0ad3e5a327d4d --- /dev/null +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDoc.java @@ -0,0 +1,20 @@ +package io.quarkus.annotation.processor.generate_doc; + +import java.io.IOException; +import java.io.Writer; +import java.util.List; + +/** + * Represent one output file, its items are going to be appended to the file + */ +interface ConfigDoc { + + List getWriteItems(); + + /** + * An item is a summary table, note below the table, ... + */ + interface WriteItem { + void accept(Writer writer) throws IOException; + } +} diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocBuilder.java b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocBuilder.java new file mode 100644 index 0000000000000..57edebb518529 --- /dev/null +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocBuilder.java @@ -0,0 +1,91 @@ +package io.quarkus.annotation.processor.generate_doc; + +import static io.quarkus.annotation.processor.Constants.SUMMARY_TABLE_ID_VARIABLE; +import static java.util.Objects.requireNonNull; + +import java.util.ArrayList; +import java.util.List; + +import io.quarkus.annotation.processor.Constants; + +/** + * {@link ConfigDoc} builder + */ +class ConfigDocBuilder { + + /** + * Declare AsciiDoc variable + */ + private static final String DECLARE_VAR = "\n:%s: %s\n"; + private final DocFormatter summaryTableDocFormatter; + protected final List writeItems = new ArrayList<>(); + + public ConfigDocBuilder() { + summaryTableDocFormatter = new SummaryTableDocFormatter(); + } + + protected ConfigDocBuilder(boolean showEnvVars) { + summaryTableDocFormatter = new SummaryTableDocFormatter(showEnvVars); + } + + /** + 
* Add documentation in a summary table and descriptive format + */ + public final ConfigDocBuilder addSummaryTable(String initialAnchorPrefix, boolean activateSearch, + List configDocItems, String fileName, + boolean includeConfigPhaseLegend) { + + writeItems.add(writer -> { + + // Create var with unique value for each summary table that will make DURATION_FORMAT_NOTE (see below) unique + var fileNameWithoutExtension = fileName.substring(0, fileName.length() - Constants.ADOC_EXTENSION.length()); + writer.append(String.format(DECLARE_VAR, SUMMARY_TABLE_ID_VARIABLE, fileNameWithoutExtension)); + + summaryTableDocFormatter.format(writer, initialAnchorPrefix, activateSearch, configDocItems, + includeConfigPhaseLegend); + + boolean hasDuration = false, hasMemory = false; + for (ConfigDocItem item : configDocItems) { + if (item.hasDurationInformationNote()) { + hasDuration = true; + } + + if (item.hasMemoryInformationNote()) { + hasMemory = true; + } + } + + if (hasDuration) { + writer.append(Constants.DURATION_FORMAT_NOTE); + } + + if (hasMemory) { + writer.append(Constants.MEMORY_SIZE_FORMAT_NOTE); + } + }); + return this; + } + + public boolean hasWriteItems() { + return !writeItems.isEmpty(); + } + + /** + * Passed strings are appended to the file + */ + public final ConfigDocBuilder write(String... 
strings) { + requireNonNull(strings); + writeItems.add(writer -> { + for (String str : strings) { + writer.append(str); + } + }); + return this; + } + + public final ConfigDoc build() { + final List docItemsCopy = List.copyOf(writeItems); + return () -> docItemsCopy; + } + +} diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocItemScanner.java b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocItemScanner.java index fe79f0a4861a8..8ed7d934c8fb1 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocItemScanner.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocItemScanner.java @@ -66,7 +66,6 @@ public void addConfigRoot(final PackageElement pkg, TypeElement clazz) { String prefix = Constants.QUARKUS; ConfigPhase configPhase = ConfigPhase.BUILD_TIME; - boolean isMapping = false; for (AnnotationMirror annotationMirror : clazz.getAnnotationMirrors()) { String annotationName = annotationMirror.getAnnotationType().toString(); @@ -88,7 +87,6 @@ public void addConfigRoot(final PackageElement pkg, TypeElement clazz) { for (AnnotationMirror mirror : clazz.getAnnotationMirrors()) { if (mirror.getAnnotationType().toString().equals(Constants.ANNOTATION_CONFIG_MAPPING)) { - isMapping = true; name = Constants.EMPTY; for (Entry entry : mirror.getElementValues() .entrySet()) { @@ -113,7 +111,7 @@ public void addConfigRoot(final PackageElement pkg, TypeElement clazz) { fileName = name.replace(Constants.DOT, Constants.DASH.charAt(0)) + Constants.ADOC_EXTENSION; } - ConfigRootInfo configRootInfo = new ConfigRootInfo(name, clazz, configPhase, isMapping, fileName); + ConfigRootInfo configRootInfo = new ConfigRootInfo(name, clazz, configPhase, fileName); configRoots.add(configRootInfo); break; } diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocWriter.java 
b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocWriter.java index d7d99741ef315..b2baf426709a3 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocWriter.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocWriter.java @@ -1,65 +1,47 @@ package io.quarkus.annotation.processor.generate_doc; -import static io.quarkus.annotation.processor.Constants.SUMMARY_TABLE_ID_VARIABLE; - import java.io.IOException; import java.io.Writer; import java.nio.file.Files; import java.nio.file.Path; -import java.util.List; import io.quarkus.annotation.processor.Constants; final public class ConfigDocWriter { - private final DocFormatter summaryTableDocFormatter = new SummaryTableDocFormatter(); - private static final String DECLARE_VAR = "\n:%s: %s\n"; /** * Write all extension configuration in AsciiDoc format in `{root}/target/asciidoc/generated/config/` directory */ public void writeAllExtensionConfigDocumentation(ConfigDocGeneratedOutput output) throws IOException { - generateDocumentation(Constants.GENERATED_DOCS_PATH.resolve(output.getFileName()), output.getAnchorPrefix(), - output.isSearchable(), output.getConfigDocItems(), output.getFileName()); - } - /** - * Generate documentation in a summary table and descriptive format - * - */ - private void generateDocumentation(Path targetPath, String initialAnchorPrefix, boolean activateSearch, - List configDocItems, String fileName) - throws IOException { - if (configDocItems.isEmpty()) { + if (output.getConfigDocItems().isEmpty()) { return; } - try (Writer writer = Files.newBufferedWriter(targetPath)) { - - // Create var with unique value for each summary table that will make DURATION_FORMAT_NOTE (see below) unique - var fileNameWithoutExtension = fileName.substring(0, fileName.length() - Constants.ADOC_EXTENSION.length()); - writer.append(String.format(DECLARE_VAR, SUMMARY_TABLE_ID_VARIABLE, 
fileNameWithoutExtension)); - - summaryTableDocFormatter.format(writer, initialAnchorPrefix, activateSearch, configDocItems); + // Create single summary table + final var configDocBuilder = new ConfigDocBuilder().addSummaryTable(output.getAnchorPrefix(), output.isSearchable(), + output.getConfigDocItems(), output.getFileName(), true); - boolean hasDuration = false, hasMemory = false; - for (ConfigDocItem item : configDocItems) { - if (item.hasDurationInformationNote()) { - hasDuration = true; - } - - if (item.hasMemoryInformationNote()) { - hasMemory = true; - } - } + generateDocumentation(output.getFileName(), configDocBuilder); + } - if (hasDuration) { - writer.append(Constants.DURATION_FORMAT_NOTE); - } + public void generateDocumentation(String fileName, ConfigDocBuilder configDocBuilder) throws IOException { + generateDocumentation( + // Resolve output file path + Constants.GENERATED_DOCS_PATH.resolve(fileName), + // Write all items + configDocBuilder.build()); + } - if (hasMemory) { - writer.append(Constants.MEMORY_SIZE_FORMAT_NOTE); + private void generateDocumentation(Path targetPath, ConfigDoc configDoc) + throws IOException { + try (Writer writer = Files.newBufferedWriter(targetPath)) { + for (ConfigDoc.WriteItem writeItem : configDoc.getWriteItems()) { + // Write documentation item, f.e. 
summary table + writeItem.accept(writer); } } } + } diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigRootInfo.java b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigRootInfo.java index 4652502c984a0..786c07fc87186 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigRootInfo.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigRootInfo.java @@ -8,17 +8,16 @@ final public class ConfigRootInfo { private final String name; private final TypeElement clazz; private final ConfigPhase configPhase; - private final boolean mapping; private final String fileName; public ConfigRootInfo( final String name, final TypeElement clazz, - final ConfigPhase configPhase, final boolean mapping, final String fileName) { + final ConfigPhase configPhase, + final String fileName) { this.name = name; this.clazz = clazz; this.configPhase = configPhase; - this.mapping = mapping; this.fileName = fileName; } @@ -35,8 +34,7 @@ public boolean equals(final Object o) { return false; } final ConfigRootInfo that = (ConfigRootInfo) o; - return mapping == that.mapping && - name.equals(that.name) && + return name.equals(that.name) && clazz.equals(that.clazz) && configPhase == that.configPhase && fileName.equals(that.fileName); @@ -44,7 +42,7 @@ public boolean equals(final Object o) { @Override public int hashCode() { - return Objects.hash(name, clazz, configPhase, mapping, fileName); + return Objects.hash(name, clazz, configPhase, fileName); } @Override @@ -53,7 +51,6 @@ public String toString() { "name='" + name + '\'' + ", clazz=" + clazz + ", configPhase=" + configPhase + - ", mapping=" + mapping + ", fileName='" + fileName + '\'' + '}'; } @@ -69,8 +66,4 @@ public TypeElement getClazz() { public ConfigPhase getConfigPhase() { return configPhase; } - - public boolean isMapping() { - return mapping; - } } diff --git 
a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/DocFormatter.java b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/DocFormatter.java index b33fe84401bb1..0564a820dbfc7 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/DocFormatter.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/DocFormatter.java @@ -61,8 +61,8 @@ default String getAnchor(String string) { return string.toLowerCase(); } - void format(Writer writer, String initialAnchorPrefix, boolean activateSearch, List configDocItems) - throws IOException; + void format(Writer writer, String initialAnchorPrefix, boolean activateSearch, List configDocItems, + boolean includeConfigPhaseLegend) throws IOException; void format(Writer writer, ConfigDocKey configDocKey) throws IOException; diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/JavaDocParser.java b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/JavaDocParser.java index 756f34b97c105..c0afad96a5ab2 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/JavaDocParser.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/JavaDocParser.java @@ -24,6 +24,7 @@ final class JavaDocParser { private static final Pattern START_OF_LINE = Pattern.compile("^", Pattern.MULTILINE); private static final Pattern REPLACE_WINDOWS_EOL = Pattern.compile("\r\n"); private static final Pattern REPLACE_MACOS_EOL = Pattern.compile("\r"); + private static final Pattern STARTING_SPACE = Pattern.compile("^ +"); private static final String BACKTICK = "`"; private static final String HASH = "#"; @@ -269,7 +270,21 @@ private void appendHtml(StringBuilder sb, Node node) { sb.append(NEW_LINE); break; case TEXT_NODE: - appendEscapedAsciiDoc(sb, ((TextNode) childNode).text()); + String text = ((TextNode) childNode).text(); + + if 
(text.isEmpty()) { + break; + } + + // Indenting the first line of a paragraph by one or more spaces makes the block literal + // Please see https://docs.asciidoctor.org/asciidoc/latest/verbatim/literal-blocks/ for more info + // This prevents literal blocks f.e. after
+ final var startingSpaceMatcher = STARTING_SPACE.matcher(text); + if (sb.length() > 0 && '\n' == sb.charAt(sb.length() - 1) && startingSpaceMatcher.find()) { + text = startingSpaceMatcher.replaceFirst(""); + } + + appendEscapedAsciiDoc(sb, text); break; default: appendHtml(sb, childNode); diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/MavenConfigDocBuilder.java b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/MavenConfigDocBuilder.java new file mode 100644 index 0000000000000..e55a495fbc1a3 --- /dev/null +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/MavenConfigDocBuilder.java @@ -0,0 +1,98 @@ +package io.quarkus.annotation.processor.generate_doc; + +import static io.quarkus.annotation.processor.Constants.EMPTY; +import static io.quarkus.annotation.processor.Constants.NEW_LINE; +import static io.quarkus.annotation.processor.Constants.SECTION_TITLE_L1; + +import java.util.ArrayList; +import java.util.List; + +import io.quarkus.annotation.processor.Constants; + +public final class MavenConfigDocBuilder extends ConfigDocBuilder { + + public MavenConfigDocBuilder() { + super(false); + } + + private final JavaDocParser javaDocParser = new JavaDocParser(); + + public void addTableTitle(String goalTitle) { + write(SECTION_TITLE_L1, goalTitle, NEW_LINE); + } + + public void addNewLine() { + write(NEW_LINE); + } + + public void addTableDescription(String goalDescription) { + write(NEW_LINE, javaDocParser.parseConfigDescription(goalDescription), NEW_LINE); + } + + public GoalParamsBuilder newGoalParamsBuilder() { + return new GoalParamsBuilder(javaDocParser); + } + + private static abstract class TableBuilder { + + protected final List configDocItems = new ArrayList<>(); + + /** + * Section name that is displayed in a table header + */ + abstract protected String getSectionName(); + + public List build() { + + // a summary table + final ConfigDocSection parameterSection = new 
ConfigDocSection(); + parameterSection.setShowSection(true); + parameterSection.setName(getSectionName()); + parameterSection.setSectionDetailsTitle(getSectionName()); + parameterSection.setOptional(false); + parameterSection.setConfigDocItems(List.copyOf(configDocItems)); + + // topConfigDocItem wraps the summary table + final ConfigDocItem topConfigDocItem = new ConfigDocItem(); + topConfigDocItem.setConfigDocSection(parameterSection); + + return List.of(topConfigDocItem); + } + + public boolean tableIsNotEmpty() { + return !configDocItems.isEmpty(); + } + } + + public static final class GoalParamsBuilder extends TableBuilder { + + private final JavaDocParser javaDocParser; + + private GoalParamsBuilder(JavaDocParser javaDocParser) { + this.javaDocParser = javaDocParser; + } + + public void addParam(String type, String name, String defaultValue, boolean required, String description) { + final ConfigDocKey configDocKey = new ConfigDocKey(); + configDocKey.setType(type); + configDocKey.setKey(name); + configDocKey.setConfigPhase(ConfigPhase.RUN_TIME); + configDocKey.setDefaultValue(defaultValue == null ? 
Constants.EMPTY : defaultValue); + if (description != null && !description.isBlank()) { + configDocKey.setConfigDoc(javaDocParser.parseConfigDescription(description)); + } else { + configDocKey.setConfigDoc(EMPTY); + } + configDocKey.setOptional(!required); + final ConfigDocItem configDocItem = new ConfigDocItem(); + configDocItem.setConfigDocKey(configDocKey); + configDocItems.add(configDocItem); + } + + @Override + protected String getSectionName() { + return "Parameter"; + } + } + +} diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/SummaryTableDocFormatter.java b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/SummaryTableDocFormatter.java index 47935389f9c42..654b484967c47 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/SummaryTableDocFormatter.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/SummaryTableDocFormatter.java @@ -1,5 +1,7 @@ package io.quarkus.annotation.processor.generate_doc; +import static io.quarkus.annotation.processor.Constants.CONFIG_PHASE_LEGEND; +import static io.quarkus.annotation.processor.Constants.NEW_LINE; import static io.quarkus.annotation.processor.generate_doc.DocGeneratorUtil.toEnvVarName; import java.io.IOException; @@ -15,22 +17,34 @@ final class SummaryTableDocFormatter implements DocFormatter { public static final String CONFIGURATION_TABLE_CLASS = ".configuration-reference"; private static final String TABLE_ROW_FORMAT = "\n\na|%s [[%s]]`link:#%s[%s]`\n\n[.description]\n--\n%s\n--%s|%s %s\n|%s\n"; private static final String SECTION_TITLE = "[[%s]]link:#%s[%s]"; + private static final String TABLE_HEADER_FORMAT = "[%s, cols=\"80,.^10,.^10\"]\n|==="; private static final String TABLE_SECTION_ROW_FORMAT = "\n\nh|%s\n%s\nh|Type\nh|Default"; - private static final String TABLE_HEADER_FORMAT = "[.configuration-legend]%s\n[%s, cols=\"80,.^10,.^10\"]\n|==="; + private final boolean showEnvVars; 
private String anchorPrefix = ""; + public SummaryTableDocFormatter(boolean showEnvVars) { + this.showEnvVars = showEnvVars; + } + + public SummaryTableDocFormatter() { + this(true); + } + /** * Generate configuration keys in table format with search engine activated or not. * Useful when we want to optionally activate or deactivate search engine */ @Override - public void format(Writer writer, String initialAnchorPrefix, boolean activateSearch, List configDocItems) + public void format(Writer writer, String initialAnchorPrefix, boolean activateSearch, + List configDocItems, boolean includeConfigPhaseLegend) throws IOException { + if (includeConfigPhaseLegend) { + writer.append("[.configuration-legend]").append(CONFIG_PHASE_LEGEND).append(NEW_LINE); + } String searchableClass = activateSearch ? SEARCHABLE_TABLE_CLASS : Constants.EMPTY; String tableClasses = CONFIGURATION_TABLE_CLASS + searchableClass; - final String tableHeaders = String.format(TABLE_HEADER_FORMAT, Constants.CONFIG_PHASE_LEGEND, tableClasses); - writer.append(tableHeaders); + writer.append(String.format(TABLE_HEADER_FORMAT, tableClasses)); anchorPrefix = initialAnchorPrefix; // make sure that section-less configs get a legend @@ -74,18 +88,20 @@ public void format(Writer writer, ConfigDocKey configDocKey) throws IOException String doc = configDocKey.getConfigDoc(); - // Convert a property name to an environment variable name and show it in the config description - final String envVarExample = String.format("ifdef::add-copy-button-to-env-var[]\n" + - "Environment variable: env_var_with_copy_button:+++%1$s+++[]\n" + - "endif::add-copy-button-to-env-var[]\n" + - "ifndef::add-copy-button-to-env-var[]\n" + - "Environment variable: `+++%1$s+++`\n" + - "endif::add-copy-button-to-env-var[]", toEnvVarName(configDocKey.getKey())); - if (configDocKey.getConfigDoc().isEmpty()) { - doc = envVarExample; - } else { - // Add 2 new lines in order to show the environment variable on next line - doc += TWO_NEW_LINES 
+ envVarExample; + if (showEnvVars) { + // Convert a property name to an environment variable name and show it in the config description + final String envVarExample = String.format("ifdef::add-copy-button-to-env-var[]\n" + + "Environment variable: env_var_with_copy_button:+++%1$s+++[]\n" + + "endif::add-copy-button-to-env-var[]\n" + + "ifndef::add-copy-button-to-env-var[]\n" + + "Environment variable: `+++%1$s+++`\n" + + "endif::add-copy-button-to-env-var[]", toEnvVarName(configDocKey.getKey())); + if (configDocKey.getConfigDoc().isEmpty()) { + doc = envVarExample; + } else { + // Add 2 new lines in order to show the environment variable on next line + doc += TWO_NEW_LINES + envVarExample; + } } final String typeDetail = DocGeneratorUtil.getTypeFormatInformationNote(configDocKey); diff --git a/core/processor/src/test/java/io/quarkus/annotation/processor/generate_doc/JavaDocConfigDescriptionParserTest.java b/core/processor/src/test/java/io/quarkus/annotation/processor/generate_doc/JavaDocConfigDescriptionParserTest.java index 292162160a273..b84268727d097 100644 --- a/core/processor/src/test/java/io/quarkus/annotation/processor/generate_doc/JavaDocConfigDescriptionParserTest.java +++ b/core/processor/src/test/java/io/quarkus/annotation/processor/generate_doc/JavaDocConfigDescriptionParserTest.java @@ -25,6 +25,12 @@ public void parseNullJavaDoc() { assertEquals("", parsed); } + @Test + public void removeParagraphIndentation() { + String parsed = parser.parseConfigDescription("First paragraph

Second Paragraph"); + assertEquals("First paragraph\n\nSecond Paragraph", parsed); + } + @Test public void parseUntrimmedJavaDoc() { String parsed = parser.parseConfigDescription(" "); diff --git a/core/runtime/src/main/java/io/quarkus/logging/LoggingFilter.java b/core/runtime/src/main/java/io/quarkus/logging/LoggingFilter.java new file mode 100644 index 0000000000000..bf2adb61f2b4f --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/logging/LoggingFilter.java @@ -0,0 +1,23 @@ +package io.quarkus.logging; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Makes the filter class known to Quarkus by the specified name. + * The filter can then be configured for a handler (like the logging handler using {@code quarkus.log.console.filter}). + * + * This class must ONLY be placed on implementations of {@link java.util.logging.Filter} that are marked as {@code final}. + */ + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.TYPE) +public @interface LoggingFilter { + + /** + * Name with which the filter is referred to in configuration + */ + String name(); +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/Startup.java b/core/runtime/src/main/java/io/quarkus/runtime/Startup.java index e57a74b26bf8c..43750635fffa9 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/Startup.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/Startup.java @@ -12,11 +12,20 @@ import javax.enterprise.inject.spi.ObserverMethod; /** - * This annotation can be used to initialize a CDI bean at application startup. The behavior is similar to a declaration of an - * observer of the {@link StartupEvent} - a contextual instance is created and lifecycle callbacks (such as - * {@link javax.annotation.PostConstruct}) are invoked. 
In fact, a synthetic observer of the {@link StartupEvent} is generated - * for each bean annotated with this annotation. Furthermore, {@link #value()} can be used to specify the priority of the - * generated observer method and thus affect observers ordering. + * This annotation can be used to initialize a CDI bean at application startup: + *
    + *
  • If a bean class is annotated then a contextual instance is created and the {@link javax.annotation.PostConstruct} + * callbacks are invoked.
  • + *
  • If a producer method is annotated then a contextual instance is created, i.e. the producer method is invoked.
  • + *
  • If a producer field is annotated then a contextual instance is created, i.e. the producer field is read.
  • + *
  • If a non-static non-producer no-args method of a bean class is annotated then a contextual instance is created, the + * lifecycle callbacks are invoked and finally the method itself is invoked.
  • + *

    + * The behavior is similar to a declaration of a {@link StartupEvent} observer. In fact, a synthetic observer of the + * {@link StartupEvent} is generated for each occurence of this annotation. + *

    + * Furthermore, {@link #value()} can be used to specify the priority of the generated observer method and thus affects observers + * ordering. *

    * The contextual instance is destroyed immediately afterwards for {@link Dependent} beans. *

    @@ -25,7 +34,7 @@ *

      * @ApplicationScoped
      * class Bean1 {
    - *     void onStart(@Observes StartupEvent event) {
    + *     void onStart(@Observes StartupEvent event) {
      *         // place the logic here
      *     }
      * }
    @@ -33,6 +42,20 @@
      * @Startup
      * @ApplicationScoped
      * class Bean2 {
    + *
    + *     @PostConstruct
    + *     void init() {
    + *         // place the logic here
    + *     }
    + * }
    + *
    + * @ApplicationScoped
    + * class Bean3 {
    + *
    + *     @Startup
    + *     void init() {
    + *         // place the logic here
    + *     }
      * }
      * 
    * diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/ConsoleConfig.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/ConsoleConfig.java index e888f45159bc7..a9773156dfb72 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/logging/ConsoleConfig.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/ConsoleConfig.java @@ -55,6 +55,12 @@ public class ConsoleConfig { @ConfigItem(defaultValue = "0") int darken; + /** + * The name of the filter to link to the console handler. + */ + @ConfigItem + Optional filter; + /** * Console async logging config */ diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/DiscoveredLogComponents.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/DiscoveredLogComponents.java new file mode 100644 index 0000000000000..a7be611de09d8 --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/DiscoveredLogComponents.java @@ -0,0 +1,21 @@ +package io.quarkus.runtime.logging; + +import java.util.Collections; +import java.util.Map; + +public class DiscoveredLogComponents { + + private Map nameToFilterClass = Collections.emptyMap(); + + public Map getNameToFilterClass() { + return nameToFilterClass; + } + + public void setNameToFilterClass(Map nameToFilterClass) { + this.nameToFilterClass = nameToFilterClass; + } + + public static DiscoveredLogComponents ofEmpty() { + return new DiscoveredLogComponents(); + } +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/FileConfig.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/FileConfig.java index 5e9b9239daa78..9e8492fb82e31 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/logging/FileConfig.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/FileConfig.java @@ -40,6 +40,12 @@ public class FileConfig { @ConfigItem(defaultValue = DEFAULT_LOG_FILE_NAME) File path; + /** + * The name of the filter to link to the file handler. 
+ */ + @ConfigItem + Optional filter; + /** * File async logging config */ diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/LoggingSetupRecorder.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/LoggingSetupRecorder.java index e3bf40d330e4f..f10f9c3dab731 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/logging/LoggingSetupRecorder.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/LoggingSetupRecorder.java @@ -18,6 +18,7 @@ import java.util.function.Function; import java.util.function.Supplier; import java.util.logging.ErrorManager; +import java.util.logging.Filter; import java.util.logging.Formatter; import java.util.logging.Handler; import java.util.logging.Level; @@ -29,6 +30,7 @@ import org.jboss.logmanager.LogContext; import org.jboss.logmanager.Logger; import org.jboss.logmanager.errormanager.OnlyOnceErrorManager; +import org.jboss.logmanager.filters.AllFilter; import org.jboss.logmanager.formatters.ColorPatternFormatter; import org.jboss.logmanager.formatters.PatternFormatter; import org.jboss.logmanager.handlers.AsyncHandler; @@ -74,15 +76,17 @@ public static void handleFailedStart(RuntimeValue>> ba ConsoleRuntimeConfig consoleRuntimeConfig = new ConsoleRuntimeConfig(); ConfigInstantiator.handleObject(consoleRuntimeConfig); new LoggingSetupRecorder(new RuntimeValue<>(consoleRuntimeConfig)).initializeLogging(config, buildConfig, + DiscoveredLogComponents.ofEmpty(), Collections.emptyMap(), false, null, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), - Collections.emptyList(), banner, LaunchMode.DEVELOPMENT); + Collections.emptyList(), banner, LaunchMode.DEVELOPMENT, false); } public ShutdownListener initializeLogging(LogConfig config, LogBuildTimeConfig buildConfig, + DiscoveredLogComponents discoveredLogComponents, final Map categoryDefaultMinLevels, final boolean enableWebStream, final RuntimeValue> devUiConsoleHandler, @@ -90,7 +94,9 @@ public ShutdownListener 
initializeLogging(LogConfig config, LogBuildTimeConfig b final List>> additionalNamedHandlers, final List>> possibleConsoleFormatters, final List>> possibleFileFormatters, - final RuntimeValue>> possibleBannerSupplier, LaunchMode launchMode) { + final RuntimeValue>> possibleBannerSupplier, + LaunchMode launchMode, + boolean validateFilters) { ShutdownNotifier shutdownNotifier = new ShutdownNotifier(); final Map categories = config.categories; @@ -126,13 +132,15 @@ public void accept(String loggerName, CleanupFilterConfig config) { handler.setFilter(cleanupFiler); } + Map namedFilters = createNamedFilters(discoveredLogComponents); + final ArrayList handlers = new ArrayList<>( 3 + additionalHandlers.size() + (config.handlers.isPresent() ? config.handlers.get().size() : 0)); if (config.console.enable) { final Handler consoleHandler = configureConsoleHandler(config.console, consoleRuntimeConfig.getValue(), - errorManager, cleanupFiler, - possibleConsoleFormatters, possibleBannerSupplier, launchMode); + errorManager, cleanupFiler, namedFilters, possibleConsoleFormatters, possibleBannerSupplier, + launchMode, validateFilters); errorManager = consoleHandler.getErrorManager(); handlers.add(consoleHandler); } @@ -157,11 +165,13 @@ public void close() throws SecurityException { if (config.file.enable) { handlers.add( - configureFileHandler(config.file, errorManager, cleanupFiler, possibleFileFormatters)); + configureFileHandler(config.file, errorManager, cleanupFiler, namedFilters, possibleFileFormatters, + validateFilters)); } if (config.syslog.enable) { - final Handler syslogHandler = configureSyslogHandler(config.syslog, errorManager, cleanupFiler); + final Handler syslogHandler = configureSyslogHandler(config.syslog, errorManager, cleanupFiler, + namedFilters, validateFilters); if (syslogHandler != null) { handlers.add(syslogHandler); } @@ -185,8 +195,8 @@ public void close() throws SecurityException { Map namedHandlers = shouldCreateNamedHandlers(config, 
additionalNamedHandlers) ? createNamedHandlers(config, consoleRuntimeConfig.getValue(), additionalNamedHandlers, - possibleConsoleFormatters, possibleFileFormatters, errorManager, cleanupFiler, launchMode, - shutdownNotifier) + possibleConsoleFormatters, possibleFileFormatters, errorManager, cleanupFiler, namedFilters, launchMode, + shutdownNotifier, false) : Collections.emptyMap(); if (!categories.isEmpty()) { Map additionalNamedHandlersMap; @@ -238,6 +248,27 @@ public void accept(String categoryName, CategoryConfig config) { return shutdownNotifier; } + private static Map createNamedFilters(DiscoveredLogComponents discoveredLogComponents) { + if (discoveredLogComponents.getNameToFilterClass().isEmpty()) { + return Collections.emptyMap(); + } + + Map nameToFilter = new HashMap<>(); + discoveredLogComponents.getNameToFilterClass().forEach(new BiConsumer<>() { + @Override + public void accept(String name, String className) { + try { + nameToFilter.put(name, + (Filter) Class.forName(className, true, Thread.currentThread().getContextClassLoader()) + .getConstructor().newInstance()); + } catch (Exception e) { + throw new RuntimeException("Unable to create instance of Logging Filter '" + className + "'"); + } + } + }); + return nameToFilter; + } + /** * WARNING: this method is part of the recorder but is actually called statically at build time. * You may not push RuntimeValue's to it. 
@@ -267,15 +298,15 @@ public static void initializeBuildTimeLogging(LogConfig config, LogBuildTimeConf if (config.console.enable) { final Handler consoleHandler = configureConsoleHandler(config.console, consoleConfig, errorManager, - logCleanupFilter, - Collections.emptyList(), new RuntimeValue<>(Optional.empty()), launchMode); + logCleanupFilter, Collections.emptyMap(), Collections.emptyList(), + new RuntimeValue<>(Optional.empty()), launchMode, false); errorManager = consoleHandler.getErrorManager(); handlers.add(consoleHandler); } Map namedHandlers = createNamedHandlers(config, consoleConfig, Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList(), errorManager, logCleanupFilter, launchMode, dummy); + Collections.emptyList(), Collections.emptyList(), errorManager, logCleanupFilter, + Collections.emptyMap(), launchMode, dummy, true); for (Map.Entry entry : categories.entrySet()) { final String categoryName = entry.getKey(); @@ -356,18 +387,18 @@ private static Map createNamedHandlers(LogConfig config, Consol List>> additionalNamedHandlers, List>> possibleConsoleFormatters, List>> possibleFileFormatters, - ErrorManager errorManager, - LogCleanupFilter cleanupFilter, LaunchMode launchMode, - ShutdownNotifier shutdownHandler) { + ErrorManager errorManager, LogCleanupFilter cleanupFilter, + Map namedFilters, LaunchMode launchMode, + ShutdownNotifier shutdownHandler, boolean validateFilters) { Map namedHandlers = new HashMap<>(); for (Entry consoleConfigEntry : config.consoleHandlers.entrySet()) { ConsoleConfig namedConsoleConfig = consoleConfigEntry.getValue(); if (!namedConsoleConfig.enable) { continue; } - final Handler consoleHandler = configureConsoleHandler(namedConsoleConfig, consoleRuntimeConfig, errorManager, - cleanupFilter, - possibleConsoleFormatters, null, launchMode); + final Handler consoleHandler = configureConsoleHandler(namedConsoleConfig, consoleRuntimeConfig, + errorManager, cleanupFilter, namedFilters, 
possibleConsoleFormatters, null, launchMode, + validateFilters); addToNamedHandlers(namedHandlers, consoleHandler, consoleConfigEntry.getKey()); } for (Entry fileConfigEntry : config.fileHandlers.entrySet()) { @@ -375,8 +406,8 @@ private static Map createNamedHandlers(LogConfig config, Consol if (!namedFileConfig.enable) { continue; } - final Handler fileHandler = configureFileHandler(namedFileConfig, errorManager, cleanupFilter, - possibleFileFormatters); + final Handler fileHandler = configureFileHandler(namedFileConfig, errorManager, cleanupFilter, namedFilters, + possibleFileFormatters, validateFilters); addToNamedHandlers(namedHandlers, fileHandler, fileConfigEntry.getKey()); } for (Entry sysLogConfigEntry : config.syslogHandlers.entrySet()) { @@ -384,7 +415,8 @@ private static Map createNamedHandlers(LogConfig config, Consol if (!namedSyslogConfig.enable) { continue; } - final Handler syslogHandler = configureSyslogHandler(namedSyslogConfig, errorManager, cleanupFilter); + final Handler syslogHandler = configureSyslogHandler(namedSyslogConfig, errorManager, cleanupFilter, + namedFilters, validateFilters); if (syslogHandler != null) { addToNamedHandlers(namedHandlers, syslogHandler, sysLogConfigEntry.getKey()); } @@ -442,8 +474,7 @@ public void run() { } private static void addNamedHandlersToRootHandlers(Optional> handlerNames, Map namedHandlers, - ArrayList effectiveHandlers, - ErrorManager errorManager) { + ArrayList effectiveHandlers, ErrorManager errorManager) { if (handlerNames.isEmpty()) { return; } @@ -471,11 +502,15 @@ public void initializeLoggingForImageBuild() { } } - private static Handler configureConsoleHandler(final ConsoleConfig config, ConsoleRuntimeConfig consoleRuntimeConfig, + private static Handler configureConsoleHandler(final ConsoleConfig config, + ConsoleRuntimeConfig consoleRuntimeConfig, final ErrorManager defaultErrorManager, final LogCleanupFilter cleanupFilter, + final Map namedFilters, final List>> possibleFormatters, - final 
RuntimeValue>> possibleBannerSupplier, LaunchMode launchMode) { + final RuntimeValue>> possibleBannerSupplier, + LaunchMode launchMode, + boolean validateFilters) { Formatter formatter = null; boolean formatterWarning = false; @@ -516,7 +551,7 @@ private static Handler configureConsoleHandler(final ConsoleConfig config, Conso config.stderr ? ConsoleHandler.Target.SYSTEM_ERR : ConsoleHandler.Target.SYSTEM_OUT, formatter); consoleHandler.setLevel(config.level); consoleHandler.setErrorManager(defaultErrorManager); - consoleHandler.setFilter(cleanupFilter); + applyFilter(validateFilters, defaultErrorManager, cleanupFilter, config.filter, namedFilters, consoleHandler); Handler handler = config.async.enable ? createAsyncHandler(config.async, config.level, consoleHandler) : consoleHandler; @@ -554,7 +589,9 @@ public void close() throws SecurityException { } private static Handler configureFileHandler(final FileConfig config, final ErrorManager errorManager, - final LogCleanupFilter cleanupFilter, final List>> possibleFileFormatters) { + final LogCleanupFilter cleanupFilter, Map namedFilters, + final List>> possibleFileFormatters, + final boolean validateFilters) { FileHandler handler; FileConfig.RotationConfig rotationConfig = config.rotation; if (rotationConfig.fileSuffix.isPresent()) { @@ -596,6 +633,7 @@ private static Handler configureFileHandler(final FileConfig config, final Error handler.setErrorManager(errorManager); handler.setLevel(config.level); handler.setFilter(cleanupFilter); + applyFilter(validateFilters, errorManager, cleanupFilter, config.filter, namedFilters, handler); if (formatterWarning) { handler.getErrorManager().error("Multiple file formatters were activated", null, ErrorManager.GENERIC_FAILURE); @@ -607,9 +645,25 @@ private static Handler configureFileHandler(final FileConfig config, final Error return handler; } - private static Handler configureSyslogHandler(final SyslogConfig config, - final ErrorManager errorManager, - final LogCleanupFilter 
logCleanupFilter) { + private static void applyFilter(boolean validateFilters, ErrorManager errorManager, LogCleanupFilter cleanupFilter, + Optional filterName, Map namedFilters, Handler handler) { + if (filterName.isEmpty() || !validateFilters) { + handler.setFilter(cleanupFilter); + } else { + String name = filterName.get(); + Filter filter = namedFilters.get(name); + if (filter == null) { + errorManager.error("Unable to find named filter '" + name + "'", null, ErrorManager.GENERIC_FAILURE); + handler.setFilter(cleanupFilter); + } else { + handler.setFilter(new AllFilter(List.of(cleanupFilter, filter))); + } + } + } + + private static Handler configureSyslogHandler(final SyslogConfig config, final ErrorManager errorManager, + final LogCleanupFilter logCleanupFilter, + final Map namedFilters, final boolean validateFilters) { + try { + final SyslogHandler handler = new SyslogHandler(config.endpoint.getHostString(), config.endpoint.getPort()); + handler.setAppName(config.appName.orElse(getProcessName())); @@ -625,6 +679,7 @@ private static Handler configureSyslogHandler(final SyslogConfig config, handler.setFormatter(formatter); handler.setErrorManager(errorManager); handler.setFilter(logCleanupFilter); + applyFilter(validateFilters, errorManager, logCleanupFilter, config.filter, namedFilters, handler); if (config.async.enable) { return createAsyncHandler(config.async, config.level, handler); } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/SyslogConfig.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/SyslogConfig.java index 38a3e49871c98..f0c64fc80f06f 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/logging/SyslogConfig.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/SyslogConfig.java @@ -89,6 +89,12 @@ public class SyslogConfig { @ConfigItem(defaultValue = "ALL") Level level; + /** + * The name of the filter to link to the syslog handler.
+ */ + @ConfigItem + Optional filter; + /** * Syslog async logging config */ diff --git a/docs/pom.xml b/docs/pom.xml index 9d822f26545c2..4ba712b36fe97 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -2802,6 +2802,23 @@ + + generate-quarkus-mvn-plugin-docs + process-classes + + java + + + ${skipDocs} + io.quarkus.docs.generation.QuarkusMavenPluginDocsGenerator + + ${project.basedir}/../devtools/maven/target/classes/META-INF/maven/plugin.xml + + + ${env.MAVEN_CMD_LINE_ARGS} + + + all-build-item-classes process-classes diff --git a/docs/src/main/asciidoc/command-mode-reference.adoc b/docs/src/main/asciidoc/command-mode-reference.adoc index 7c0c2b4e29006..89f0013fe373e 100644 --- a/docs/src/main/asciidoc/command-mode-reference.adoc +++ b/docs/src/main/asciidoc/command-mode-reference.adoc @@ -18,6 +18,19 @@ Clone the Git repository: `git clone {quickstarts-clone-url}`, or download an {q The solution is located in the `getting-started-command-mode` {quickstarts-tree-url}/getting-started-command-mode[directory]. +== Creating the Maven project + +First, we need to create a new Quarkus project with the following command: + +:create-app-artifact-id: command-mode-quickstart +include::{includes}/devtools/create-app.adoc[] + +NOTE: The suggested project creation command lines disable the codestarts to avoid including a REST server. Similarly, if you use code.quarkus.io to generate a +project, you need to go to *MORE OPTIONS -> Starter Code* and select *No* to avoid adding the RESTEasy Reactive extension. + +The RESTEasy Reactive extension is added automatically only if you ask for codestarts and you didn't specify any extensions. 
+ + == Writing Command Mode Applications There are two different approaches that can be used to implement applications @@ -183,7 +196,7 @@ public class HelloTest { @Test @Launch("World") public void testLaunchCommand(LaunchResult result) { - Assertions.assertEquals("Hello World", result.getOutput()); + Assertions.assertTrue(result.getOutput().contains("Hello World")); } @Test @@ -195,12 +208,15 @@ public class HelloTest { public void testManualLaunch(QuarkusMainLauncher launcher) { LaunchResult result = launcher.launch("Everyone"); Assertions.assertEquals(0, result.exitCode()); - Assertions.assertEquals("Hello Everyone", result.getOutput()); + Assertions.assertTrue(result.getOutput().contains("Hello Everyone")); } } ---- +Each test method must be annotated with `@Launch` to automatically start the application or have a `QuarkusMainLauncher` +parameter to manually launch the application. + We can then extend this with an integration test that can be used to test the native executable or runnable jar: [source,java] diff --git a/docs/src/main/asciidoc/infinispan-client.adoc b/docs/src/main/asciidoc/infinispan-client.adoc index 46ae2b62bc806..3a01a42383fb2 100644 --- a/docs/src/main/asciidoc/infinispan-client.adoc +++ b/docs/src/main/asciidoc/infinispan-client.adoc @@ -62,17 +62,17 @@ Add the following properties to connect to Infinispan Server: [source,properties] ---- -# Infinispan Server address -quarkus.infinispan-client.server-list=localhost:11222 +quarkus.infinispan-client.server-list=localhost:11222 <1> -# Authentication -quarkus.infinispan-client.auth-username=admin -quarkus.infinispan-client.auth-password=password +quarkus.infinispan-client.auth-username=admin <2> +quarkus.infinispan-client.auth-password=password <3> -# Infinispan client intelligence -# Use BASIC as a Docker for Mac workaround -quarkus.infinispan-client.client-intelligence=BASIC +quarkus.infinispan-client.client-intelligence=BASIC <4> ---- +<1> Sets Infinispan Server address list, separated 
with commas +<2> Sets the authentication username +<3> Sets the authentication password +<4> Sets the client intelligence. Use BASIC as a workaround if using Docker for Mac. .Running Infinispan Server @@ -90,6 +90,31 @@ Infinispan Server also enables authentication and security authorization by defa $ ./bin/cli.sh user create admin -p password ---- +=== Creating caches from the client + +When a cache is accessed from the client, if the cache does not exist in the Infinispan Server and you want +to create it on first access, use one of the following properties: + +[source,properties] +---- +quarkus.infinispan-client.cache.books.configuration-uri=cacheConfig.xml <1> +quarkus.infinispan-client.cache.magazine.configuration= <2> +---- +<1> The file name located under the `resources` folder that contains the configuration of the 'books' cache +<2> The configuration of the 'magazine' cache as a plain text property + +If both `configuration-uri` and `configuration` are configured for the same cache with the same Quarkus profile, +`configuration-uri` gets preference over `configuration`. + +If nothing is configured for a particular cache, it will be created with the following basic configuration: + +[source, xml] +---- + + + +---- + === Authentication mechanisms You can use the following authentication mechanisms with the Infinispan client: @@ -478,10 +503,28 @@ You can read more about https://infinispan.org/docs/stable/titles/developing/dev == Near Caching -Near caching is disabled by default, but you can enable it by setting the profile config property -`quarkus.infinispan-client.near-cache-max-entries` to a value greater than 0. You can also configure -a regular expression so that only a subset of caches have near caching applied through the -`quarkus.infinispan-client.near-cache-name-pattern` attribute. 
+Near caching is disabled by default, but you can enable it on a per cache basis by configuring the following properties: + +[source,properties] +---- +quarkus.infinispan-client.cache.books.near-cache-mode=INVALIDATED <1> +quarkus.infinispan-client.cache.books.near-cache-max-entries=200 <2> +quarkus.infinispan-client.cache.books.near-cache-use-bloom-filter=true <3> +---- + +<1> Enables near caching for the 'books' cache by setting the mode to `INVALIDATED` +<2> Sets the maximum number of entries that the near cache of the 'books' cache can hold before eviction occurs +<3> Enables bloom filter for the 'books' cache + +=== Bounded near caching + +You should always use bounded near caches by specifying the maximum number of entries they can contain. + +=== Bloom filters + +If you need to optimize the performance for write operations by reducing the total number of invalidation messages, +enable bloom filters. Bloom filters reside on Infinispan Server and keep track of the entries that the client has requested. +They cannot be used with an unbounded near cache: the maximum number of entries must be defined when enabling bloom filters. == Encryption diff --git a/docs/src/main/asciidoc/logging.adoc b/docs/src/main/asciidoc/logging.adoc index 60e877a48f30d..f946ff505ef15 100644 --- a/docs/src/main/asciidoc/logging.adoc +++ b/docs/src/main/asciidoc/logging.adoc @@ -315,6 +315,42 @@ The console log handler is enabled by default. It outputs all log events to the For details of its configuration options, see link:#quarkus-log-logging-log-config_quarkus.log.console-console-logging[the Console Logging configuration reference]. +[TIP] +.Logging filters +==== +Log handlers (like the console log handler) can have a link:https://docs.oracle.com/en/java/javase/11/docs/api/java.logging/java/util/logging/Filter.html[filter] associated with them, whose +purpose is to determine whether a record should actually be logged or not.
+ +These filters are registered by placing the `@io.quarkus.logging.LoggingFilter` annotation on a (`final`) class that implements `java.util.logging.Filter` and setting the `name` property. + +Finally, the filter is attached using the `filter` configuration property of the appropriate handler. + +Let's say for example that we wanted to filter out logging records that contained the word `test` from the console logs. +We could write a filter like so: + +[source,java] +---- +import io.quarkus.logging.LoggingFilter; +import java.util.logging.Filter; +import java.util.logging.LogRecord; + +@LoggingFilter(name = "my-filter") +public final class TestFilter implements Filter { + @Override + public boolean isLoggable(LogRecord record) { + return !record.getMessage().contains("test"); + } +} +---- + +And we would register this filter to the console handler like so: + +[source, properties] +---- +quarkus.log.console.filter=my-filter +---- +==== + === File log handler The file log handler is disabled by default. It outputs all log events to a file on the application's host. 
diff --git a/docs/src/main/asciidoc/platform.adoc b/docs/src/main/asciidoc/platform.adoc index 010426f5fad4c..4451a70e81c33 100644 --- a/docs/src/main/asciidoc/platform.adoc +++ b/docs/src/main/asciidoc/platform.adoc @@ -111,13 +111,14 @@ Extension developers that want to make their configuration options platform-spec package io.quarkus.deployment.pkg; @ConfigRoot(phase = ConfigPhase.BUILD_TIME) -public class NativeConfig { +@ConfigMapping(prefix = "quarkus") +public interface NativeConfig { /** * The docker image to use to do the image build */ - @ConfigItem(defaultValue = "${platform.quarkus.native.builder-image}") - public String builderImage; + @WithDefault("${platform.quarkus.native.builder-image}") + String builderImage(); } ---- diff --git a/docs/src/main/asciidoc/quarkus-maven-plugin.adoc b/docs/src/main/asciidoc/quarkus-maven-plugin.adoc new file mode 100644 index 0000000000000..249c48287672c --- /dev/null +++ b/docs/src/main/asciidoc/quarkus-maven-plugin.adoc @@ -0,0 +1,27 @@ +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// += Quarkus Maven Plugin + +The Quarkus Maven Plugin builds the Quarkus applications, and provides helpers to launch dev mode or build native executables. +For more information about how to use the Quarkus Maven Plugin, please refer to the xref:maven-tooling.adoc[Maven Tooling guide]. + +include::./attributes.adoc[] + +== Discover Maven goals + +Like most Maven plugins, the Quarkus Maven Plugin has a `help` goal that prints the description of the plugin, listing all available goals as well as their description. +It is also possible to print out detailed information about a goal, all its parameters and their default values. 
For instance, to see the help for the `create` goal, run: + +[source,shell] +---- +./mvnw quarkus:help -Ddetail -Dgoal=create +---- + +== Maven goals reference + +Here is the list of all the Quarkus Maven Plugin goals: + +include::{generated-dir}/config/quarkus-maven-plugin-goals.adoc[opts=optional, leveloffset=+2] diff --git a/docs/src/main/asciidoc/writing-extensions.adoc b/docs/src/main/asciidoc/writing-extensions.adoc index 48c3d752d6352..583f6a81c84c9 100644 --- a/docs/src/main/asciidoc/writing-extensions.adoc +++ b/docs/src/main/asciidoc/writing-extensions.adoc @@ -864,7 +864,7 @@ The types of values that can be injected include: - <> produced by previous build steps - <> to produce items for subsequent build steps -- <> types +- <> types - Template objects for <> WARNING: Objects which are injected into a build step method or its class _must not_ be used outside that method's @@ -1050,122 +1050,19 @@ will result in the creation of the `build.dot` file in the project's root direct [[configuration]] === Configuration -Configuration in Quarkus is based on SmallRye Config, an implementation of the MicroProfile Config specification. -All the standard features of MP-Config are supported; in addition, there are several extensions which are made available -by the SmallRye Config project as well as by Quarkus itself. +Configuration in Quarkus is based on https://smallrye.io/smallrye-config/Main/[SmallRye Config]. All features provided +by https://smallrye.io/smallrye-config/Main/[SmallRye Config] are also available in Quarkus. -The value of these properties is configured in a `application.properties` file that follows the MicroProfile config format. +Extensions must use https://smallrye.io/smallrye-config/Main/config/mappings/[SmallRye Config @ConfigMapping] to map +the configuration required by the Extension. This will allow Quarkus to automatically expose an instance of the mapping +to each configuration phase and generate the configuration documentation. 
-Configuration of Quarkus extensions is injection-based, using annotations. +==== Config Phases -==== Configuration Keys - -Leaf configuration keys are mapped to non-`private` fields via the `@io.quarkus.runtime.annotations.ConfigItem` annotation. - -NOTE: Though the SmallRye Config project is used for implementation, the standard `@ConfigProperty` annotation does not have the -same semantics that are needed to support configuration within extensions. - -Configuration keys are normally derived from the field names that they are tied to. This is done by de-camel-casing the name and then -joining the segments with hyphens (`-`). Some examples: - -* `bindAddress` becomes `bind-address` -* `keepAliveTime` becomes `keep-alive-time` -* `requestDNSTimeout` becomes `request-dns-timeout` - -The name can also be explicitly specified by giving a `name` attribute to the `@ConfigItem` annotation. - -NOTE: Though it is possible to override the configuration key name using the `name` attribute of `@ConfigItem`, -normally this should only be done in cases where (for example) the configuration key name is the same as a Java keyword. - -==== Configuration Value types - -The type of the field with the `@ConfigItem` annotation determines the conversion that is applied to it. 
Quarkus -extensions may use the full range of configuration types made available by SmallRye Config, which includes: - -* All primitive types and primitive wrapper types -* `String` -* Any type which has a constructor accepting a single argument of type `String` or `CharSequence` -* Any type which has a static method named `of` which accepts a single argument of type `String` -* Any type which has a static method named `valueOf` or `parse` which accepts a single argument of type `CharSequence` or `String` -* `java.time.Duration` -* `java.util.regex.Pattern` -* `java.nio.file.Path` -* `io.quarkus.runtime.configuration.MemorySize` to represent data sizes -* `java.net.InetSocketAddress`, `java.net.InetAddress` and `org.wildfly.common.net.CidrAddress` -* `java.util.Locale` where the string value is an IETF BCP 47 language tag -* `java.nio.charset.Charset` where the string value is a canonical name or an alias -* `java.time.ZoneId` where the string value is parsed via `java.time.ZoneId.of(String)` -* A `List` or `Optional` of any of the above types -* `OptionalInt`, `OptionalLong`, `OptionalDouble` - -In addition, custom converters may be registered by adding their fully qualified class name in file -`META-INF/services/org.eclipse.microprofile.config.spi.Converter`. - -Though these implicit converters use reflection, Quarkus will automatically ensure that they are loaded at the appropriate time. - -===== Optional Values - -If the configuration type is one of the optional types, then empty values are allowed for the configuration key; otherwise, -specification of an empty value will result in a configuration error which prevents the application from starting. This -is especially relevant to configuration properties of inherently emptiable values such as `List`, `Set`, and `String`. Such -value types will never be empty; in the event of an empty value, an empty `Optional` is always used. 
- -==== Configuration Default Values - -A configuration item can be marked to have a default value. The default value is used when no matching configuration key -is specified in the configuration. - -Configuration items with a primitive type (such as `int` or `boolean`) implicitly use a default value of `0` or `false`. The -sole exception to this rule is the `char` type which does not have an implicit default value. - -A property with a default value is not implicitly optional. If a non-optional configuration item with a default value -is explicitly specified to have an empty value, the application will report a configuration error and will not start. If -it is desired for a property to have a default value and also be optional, it must have an `Optional` type as described above. - -==== Configuration Groups - -Configuration values are always collected into grouping classes which are marked with the `@io.quarkus.runtime.annotations.ConfigGroup` -annotation. These classes contain a field for each key within its group. In addition, configuration groups can be nested. - -===== Optional Configuration Groups - -A nested configuration group may be wrapped with an `Optional` type. In this case, the group is not populated unless one -or more properties within that group are specified in the configuration. If the group is populated, then any required -properties in the group must also be specified otherwise a configuration error will be reported and the application will -not start. - -==== Configuration Maps - -A `Map` can be used for configuration at any position where a configuration group would be allowed. The key type of such a -map *must* be `String`, and its value may be either a configuration group class or a valid leaf type. The configuration -key segment following the map's key segment will be used as the key for map values. 
- -[id='configuration-roots'] -==== Configuration Roots - -Configuration roots are configuration groups that appear in the root of the configuration tree. A configuration property's full -name is determined by joining the string `quarkus.` with the hyphenated name of the fields that form the path from the root to the -leaf field. For example, if I define a configuration root group called `ThreadPool`, with a nested group in a field named `sizing` -that in turn contains a field called `minSize`, the final configuration property will be called `quarkus.thread-pool.sizing.min-size`. - -A configuration root's name can be given with the `name` property, or it can be inferred from the class name. If the latter, -then the configuration key will be the class name, minus any `Config` or `Configuration` suffix, broken up by camel-case, -lowercased, and re-joined using hyphens (`-`). - -A configuration root's class name can contain an extra suffix segment for the case where there are configuration -roots for multiple <>. Classes which correspond to the `BUILD_TIME` and `BUILD_AND_RUN_TIME_FIXED` -may end with `BuildTimeConfig` or `BuildTimeConfiguration`, classes which correspond to the `RUN_TIME` phase -may end with `RuntimeConfig`, `RunTimeConfig`, `RuntimeConfiguration` or `RunTimeConfiguration` while classes which correspond -to the `BOOTSTRAP` configuration may end with `BootstrapConfig` or `BootstrapConfiguration`. - -Note: The current implementation is still using injection site to determine the root set, so to avoid migration problems, it -is recommended that the injection site (field or parameter) have the same name as the configuration root class until -this change is complete. - -===== Configuration Root Phases - -Configuration roots are strictly bound by configuration phase, and attempting to access a configuration root from outside its corresponding phase will result in an error. 
-A configuration root dictates when its contained keys are read from configuration, and when they are available to applications. The phases defined by `io.quarkus.runtime.annotations.ConfigPhase` are as follows: +Configuration mappings are strictly bound by configuration phase, and attempting to access a configuration mapping from +outside its corresponding phase will result in an error. They dictate when its contained keys are read from the +configuration, and when they are available to applications. The phases defined by +`io.quarkus.runtime.annotations.ConfigPhase` are as follows: [cols="<3m,^1,^1,^1,^1,<8",options="header"] |=== @@ -1206,7 +1103,7 @@ A configuration root dictates when its contained keys are read from configuratio |=== -For all cases other than the `BUILD_TIME` case, the configuration root class and all the configuration groups and types contained therein must be located in, or reachable from, the extension's run time artifact. Configuration roots of phase `BUILD_TIME` may be located in or reachable from either of the extension's run time or deployment artifacts. +For all cases other than the `BUILD_TIME` case, the configuration mapping interface and all the configuration groups and types contained therein must be located in, or reachable from, the extension's run time artifact. Configuration mappings of phase `BUILD_TIME` may be located in or reachable from either of the extension's run time or deployment artifacts. IMPORTANT: _Bootstrap_ configuration steps are executed during runtime-init *before* any of other runtime steps. This means that code executed as part of this step cannot access anything that gets initialized in runtime init steps (runtime synthetic CDI beans being one such example). 
@@ -1214,84 +1111,84 @@ IMPORTANT: _Bootstrap_ configuration steps are executed during runtime-init *bef [source%nowrap,java] ---- -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.DefaultConverter +import io.quarkus.runtime.annotations.ConfigPhase; +import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; import java.io.File; import java.util.logging.Level; -@ConfigGroup <1> -public class FileConfig { - - /** - * Enable logging to a file. - */ - @ConfigItem(defaultValue = "true") - boolean enable; - - /** - * The log format. - */ - @ConfigItem(defaultValue = "%d{yyyy-MM-dd HH:mm:ss,SSS} %h %N[%i] %-5p [%c{1.}] (%t) %s%e%n") - String format; - - /** - * The level of logs to be written into the file. - */ - @ConfigItem(defaultValue = "ALL") - Level level; - - /** - * The name of the file in which logs will be written. - */ - @ConfigItem(defaultValue = "application.log") - File path; - -} - /** * Logging configuration. */ -@ConfigRoot(phase = ConfigPhase.RUN_TIME) <2> -public class LogConfiguration { - +@ConfigMapping(prefix = "quarkus.log") // <1> +@ConfigRoot(phase = ConfigPhase.RUN_TIME) // <2> +public interface LogConfiguration { // ... /** * Configuration properties for the logging file handler. */ - FileConfig file; + FileConfig file(); + + @ConfigGroup // <3> + interface FileConfig { + /** + * Enable logging to a file. + */ + @WithDefault("true") + boolean enable(); + + /** + * The log format. + */ + @WithDefault("%d{yyyy-MM-dd HH:mm:ss,SSS} %h %N[%i] %-5p [%c{1.}] (%t) %s%e%n") + String format(); + + /** + * The level of logs to be written into the file. + */ + @WithDefault("ALL") + Level level(); + + /** + * The name of the file in which logs will be written. + */ + @WithDefault("application.log") + File path(); + } } +---- +[source%nowrap,java] +---- public class LoggingProcessor { // ... 
- /** + /* * Logging configuration. */ - <3> - LogConfiguration config; + LogConfiguration config; // <4> } ---- A configuration property name can be split into segments. For example, a property name like `quarkus.log.file.enable` can be split into the following segments: -* `quarkus` - a namespace claimed by Quarkus which is a prefix for all `@ConfigRoot` classes, -* `log` - a name segment which corresponds to the `LogConfiguration` class annotated with `@ConfigRoot`, +* `quarkus` - a namespace claimed by Quarkus which is a prefix for `@ConfigMapping` interfaces, +* `log` - a name segment which corresponds to the prefix set in the interface annotated with `@ConfigMapping`, * `file` - a name segment which corresponds to the `file` field in this class, * `enabled` - a name segment which corresponds to `enable` field in `FileConfig` class annotated with `@ConfigGroup`. -<1> The `FileConfig` class is annotated with `@ConfigGroup` to indicate that this is an aggregate +<1> The `@ConfigMapping` annotation indicates that the interface is a configuration mapping, in this case one which +corresponds to a `quarkus.log` segment. +<2> The `@ConfigRoot` annotation indicates to which Config phase the configuration applies. +<3> The `FileConfig` class is annotated with `@ConfigGroup` to indicate that this is an aggregate configuration object containing a collection of configurable properties, rather than being a simple configuration key type. -<2> The `@ConfigRoot` annotation indicates that this object is a configuration root group, in this case one which -corresponds to a `log` segment. A class name is used to link configuration root group with the segment from a -property name. The `Configuration` part is stripped off from a `LogConfiguration` class name and the remaining `Log` -is lowercased to become a `log`. 
Since all `@ConfigRoot` annotated classes uses `quarkus` as a prefix, this finally -becomes `quarkus.log` and represents the properties which names begin with `quarkus.log.*`. -<3> Here the `LoggingProcessor` injects a `LogConfiguration` instance automatically by detecting the `@ConfigRoot` +<4> Here the `LoggingProcessor` injects a `LogConfiguration` instance automatically by detecting the `@ConfigRoot` annotation. A corresponding `application.properties` for the above example could be: @@ -1305,100 +1202,11 @@ quarkus.log.file.path=/tmp/debug.log Since `format` is not defined in these properties, the default value from `@ConfigItem` will be used instead. - -==== Enhanced conversion -You can use enhanced conversion of a config item by using the `@ConvertWith` annotation which accepts a `Converter` class object. -If the annotation is present on a config item, the implicit or custom-built in converter in use will be overridden by the value provided. -To do, see the example below which converts `YES` or `NO` values to `boolean`. -[source%nowrap,java] ----- -@ConfigRoot -public class SomeConfig { - /** - * Config item with enhanced converter - */ - @ConvertWith(YesNoConverter.class) // <1> - @ConfigItem(defaultValue = "NO") - Boolean answer; - - - public static class YesNoConverter implements Converter { - - public YesNoConverter() {} - - @Override - public Boolean convert(String s) { - if (s == null || s.isEmpty()) { - return false; - } - - switch (s) { - case "YES": - return true; - case "NO": - return false; - } - - throw new IllegalArgumentException("Unsupported value " + s + " given"); - } - } -} ----- -<1> Override the default `Boolean` converter and use the provided converter which accepts a `YES` or `NO` config values. - - -The corresponding `application.properties` will look like. -[source%nowrap,properties] ----- -quarkus.some.answer=YES ----- - -[NOTE] -===== -Enum values (config items) are translated to skewed-case (hyphenated) by default. 
The table below illustrates an enum name and their canonical equivalence: - -|=== -|Java enum| Canonical equivalent - -|DISCARD -|discard - -|READ_UNCOMMITTED -|read-uncommitted - -|SIGUSR1 -|sigusr1 - -|JavaEnum -|java-enum - -|MAKING_LifeDifficult -|making-life-difficult - -|YeOldeJBoss -|ye-olde-jboss - -|camelCaseEnum -|camel-case-enum - -|=== - -To use the default behaviour which is based on implicit converter or a custom defined one add `@DefaultConverter` annotation to the configuration item -[source%nowrap,java] ----- -@ConfigRoot -public class SomeLogConfig { - /** - * The level of logs to be written into the file. - */ - @DefaultConverter // <1> - @ConfigItem(defaultValue = "ALL") - Level level; -} ----- -<1> Use the default converter (built in or a custom converter) to convert `Level.class` enum. -===== - +A configuration mapping name can contain an extra suffix segment for the case where there are configuration +mappings for multiple <>. Classes which correspond to the `BUILD_TIME` and `BUILD_AND_RUN_TIME_FIXED` +may end with `BuildTimeConfig` or `BuildTimeConfiguration`, classes which correspond to the `RUN_TIME` phase +may end with `RuntimeConfig`, `RunTimeConfig`, `RuntimeConfiguration` or `RunTimeConfiguration` while classes which +correspond to the `BOOTSTRAP` configuration may end with `BootstrapConfig` or `BootstrapConfiguration`. === Conditional Step Inclusion @@ -1407,7 +1215,7 @@ has two optional parameters: `onlyIf` and `onlyIfNot`. These parameters can be which implement `BooleanSupplier`. The build step will only be included when the method returns `true` (for `onlyIf`) or `false` (for `onlyIfNot`). -The condition class can inject <> as long as they belong to +The condition class can inject <> as long as they belong to a build-time phase. Run time configuration is not available for condition classes. The condition class may also inject a value of type `io.quarkus.runtime.LaunchMode`. 
@@ -3189,15 +2997,27 @@ Then, at the end of your documentation, include the extensive documentation: Finally, generate the documentation and check it out. +[[ecosystem]] +== Ecosystem integration + +Some extensions may be private, and some may wish to be part of the +broader Quarkus ecosystem, and available for community re-use. +Inclusion in the Quarkiverse Hub is a convenient mechanism for handling +continuous testing and publication. +The link:https://github.com/quarkiverse/quarkiverse/wiki#getting-an-extension-onboarded[Quarkiverse Hub wiki] has instructions +for on-boarding your extension. + +Alternatively, continuous testing and publication can be handled manually. + [[ecosystem-ci]] -== Continuous testing of your extension +=== Continuous testing of your extension In order to make it easy for extension authors to test their extensions daily against the latest snapshot of Quarkus, Quarkus has introduced the notion of Ecosystem CI. The Ecosystem CI link:https://github.com/quarkusio/quarkus-ecosystem-ci/blob/main/README.adoc[README] has all the details on how to set up a GitHub Actions job to take advantage of this capability, while this link:https://www.youtube.com/watch?v=VpbRA1n0hHQ[video] provides an overview of what the process looks like. 
-== Publish your extension in registry.quarkus.io +=== Publish your extension in registry.quarkus.io Before publishing your extension to the xref:tooling.adoc[Quarkus tooling], make sure that the following requirements are met: diff --git a/docs/src/main/java/io/quarkus/docs/generation/QuarkusMavenPluginDocsGenerator.java b/docs/src/main/java/io/quarkus/docs/generation/QuarkusMavenPluginDocsGenerator.java new file mode 100644 index 0000000000000..0020be0a5c902 --- /dev/null +++ b/docs/src/main/java/io/quarkus/docs/generation/QuarkusMavenPluginDocsGenerator.java @@ -0,0 +1,106 @@ +package io.quarkus.docs.generation; + +import java.io.FileInputStream; +import java.io.IOException; +import java.io.Reader; +import java.nio.file.Files; +import java.nio.file.Path; + +import org.apache.maven.plugin.descriptor.MojoDescriptor; +import org.apache.maven.plugin.descriptor.Parameter; +import org.apache.maven.plugin.descriptor.PluginDescriptor; +import org.apache.maven.plugin.descriptor.PluginDescriptorBuilder; +import org.codehaus.plexus.util.xml.XmlStreamReader; + +import io.quarkus.annotation.processor.Constants; +import io.quarkus.annotation.processor.generate_doc.ConfigDocWriter; +import io.quarkus.annotation.processor.generate_doc.MavenConfigDocBuilder; +import io.quarkus.annotation.processor.generate_doc.MavenConfigDocBuilder.GoalParamsBuilder; + +/** + * Generates documentation for the Quarkus Maven Plugin from plugin descriptor. 
+ */ +public class QuarkusMavenPluginDocsGenerator { + + private static final String QUARKUS_MAVEN_PLUGIN = "quarkus-maven-plugin-"; + private static final String GOALS_OUTPUT_FILE_NAME = QUARKUS_MAVEN_PLUGIN + "goals" + Constants.ADOC_EXTENSION; + private static final String GOAL_PARAMETER_ANCHOR_PREFIX = QUARKUS_MAVEN_PLUGIN + "goal-%s-"; + + public static void main(String[] args) throws Exception { + + String errorMessage = null; + + // Path to Quarkus Maven Plugin descriptor (plugin.xml) + final Path pluginXmlDescriptorPath; + if (args.length == 1) { + pluginXmlDescriptorPath = Path.of(args[0]); + } else { + pluginXmlDescriptorPath = null; + errorMessage = String.format("Expected 1 argument ('plugin.xml' file path), got %s", args.length); + } + + // Check the file exist + if (pluginXmlDescriptorPath != null + && (!Files.exists(pluginXmlDescriptorPath) || !Files.isRegularFile(pluginXmlDescriptorPath))) { + errorMessage = String.format("File does not exist: %s", pluginXmlDescriptorPath.toAbsolutePath()); + } + + // Deserialize plugin.xml to PluginDescriptor + PluginDescriptor pluginDescriptor = null; + if (errorMessage == null) { + try (Reader input = new XmlStreamReader(new FileInputStream(pluginXmlDescriptorPath.toFile()))) { + pluginDescriptor = new PluginDescriptorBuilder().build(input); + } catch (IOException e) { + errorMessage = String.format("Failed to deserialize PluginDescriptor: %s", e.getMessage()); + } + } + + // Don't generate documentation if there are no goals (shouldn't happen if correct descriptor is available) + if (pluginDescriptor != null && (pluginDescriptor.getMojos() == null || pluginDescriptor.getMojos().isEmpty())) { + errorMessage = "Found no goals"; + } + + // Don't break the build if Quarkus Maven Plugin Descriptor is not available + if (errorMessage != null) { + System.err.printf("Can't generate the documentation for the Quarkus Maven Plugin\n: %s\n", errorMessage); + return; + } + + // Build Goals documentation + final var 
goalsConfigDocBuilder = new MavenConfigDocBuilder(); + for (MojoDescriptor mojo : pluginDescriptor.getMojos()) { + + // Add Goal Title + goalsConfigDocBuilder.addTableTitle(mojo.getFullGoalName()); + + // Add Goal Description + if (mojo.getDescription() != null && !mojo.getDescription().isBlank()) { + goalsConfigDocBuilder.addTableDescription(mojo.getDescription()); + } + + // Collect Goal Parameters + final GoalParamsBuilder goalParamsBuilder = goalsConfigDocBuilder.newGoalParamsBuilder(); + if (mojo.getParameters() != null) { + for (Parameter parameter : mojo.getParameters()) { + goalParamsBuilder.addParam(parameter.getType(), parameter.getName(), parameter.getDefaultValue(), + parameter.isRequired(), parameter.getDescription()); + } + } + + // Add Parameters Summary Table if the goal has parameters + if (goalParamsBuilder.tableIsNotEmpty()) { + goalsConfigDocBuilder.addSummaryTable(String.format(GOAL_PARAMETER_ANCHOR_PREFIX, mojo.getGoal()), false, + goalParamsBuilder.build(), GOALS_OUTPUT_FILE_NAME, false); + + // Start next table on a new line + goalsConfigDocBuilder.addNewLine(); + } + } + + // Generate Goals documentation + if (goalsConfigDocBuilder.hasWriteItems()) { + new ConfigDocWriter().generateDocumentation(GOALS_OUTPUT_FILE_NAME, goalsConfigDocBuilder); + } + } + +} diff --git a/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java b/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java index 769ad707fd29e..e90502dc4dfe2 100644 --- a/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java +++ b/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java @@ -40,6 +40,7 @@ import io.quarkus.deployment.Feature; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.annotations.Consume; import 
io.quarkus.deployment.annotations.ExecutionTime; import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.ExtensionSslNativeSupportBuildItem; @@ -48,6 +49,7 @@ import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; import io.quarkus.deployment.pkg.builditem.CurateOutcomeBuildItem; +import io.quarkus.narayana.jta.deployment.NarayanaInitBuildItem; import io.quarkus.runtime.configuration.ConfigurationException; import io.quarkus.smallrye.health.deployment.spi.HealthBuildItem; @@ -221,6 +223,7 @@ void generateDataSourceSupportBean(AgroalRecorder recorder, @Record(ExecutionTime.RUNTIME_INIT) @BuildStep + @Consume(NarayanaInitBuildItem.class) void generateDataSourceBeans(AgroalRecorder recorder, DataSourcesRuntimeConfig dataSourcesRuntimeConfig, List aggregatedBuildTimeConfigBuildItems, diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java index 5008a18b6da99..9adbe0abf066c 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java @@ -22,6 +22,7 @@ import javax.transaction.TransactionSynchronizationRegistry; import org.jboss.logging.Logger; +import org.jboss.tm.XAResourceRecoveryRegistry; import io.agroal.api.AgroalDataSource; import io.agroal.api.AgroalPoolInterceptor; @@ -72,6 +73,7 @@ public class DataSources { private final DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig; private final DataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig; private final TransactionManager transactionManager; + private final XAResourceRecoveryRegistry xaResourceRecoveryRegistry; private final TransactionSynchronizationRegistry transactionSynchronizationRegistry; private final 
DataSourceSupport dataSourceSupport; private final Instance agroalPoolInterceptors; @@ -82,6 +84,7 @@ public DataSources(DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig, DataSourcesRuntimeConfig dataSourcesRuntimeConfig, DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig, DataSourcesJdbcRuntimeConfig dataSourcesJdbcRuntimeConfig, TransactionManager transactionManager, + XAResourceRecoveryRegistry xaResourceRecoveryRegistry, TransactionSynchronizationRegistry transactionSynchronizationRegistry, DataSourceSupport dataSourceSupport, @Any Instance agroalPoolInterceptors) { this.dataSourcesBuildTimeConfig = dataSourcesBuildTimeConfig; @@ -89,6 +92,7 @@ public DataSources(DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig, this.dataSourcesJdbcBuildTimeConfig = dataSourcesJdbcBuildTimeConfig; this.dataSourcesJdbcRuntimeConfig = dataSourcesJdbcRuntimeConfig; this.transactionManager = transactionManager; + this.xaResourceRecoveryRegistry = xaResourceRecoveryRegistry; this.transactionSynchronizationRegistry = transactionSynchronizationRegistry; this.dataSourceSupport = dataSourceSupport; this.agroalPoolInterceptors = agroalPoolInterceptors; @@ -268,7 +272,10 @@ private void applyNewConfiguration(AgroalDataSourceConfigurationSupplier dataSou if (dataSourceJdbcBuildTimeConfig.transactions != io.quarkus.agroal.runtime.TransactionIntegration.DISABLED) { TransactionIntegration txIntegration = new NarayanaTransactionIntegration(transactionManager, - transactionSynchronizationRegistry); + transactionSynchronizationRegistry, null, false, + dataSourceJdbcBuildTimeConfig.transactions == io.quarkus.agroal.runtime.TransactionIntegration.XA + ? 
xaResourceRecoveryRegistry + : null); poolConfiguration.transactionIntegration(txIntegration); } @@ -287,12 +294,14 @@ private void applyNewConfiguration(AgroalDataSourceConfigurationSupplier dataSou // Authentication if (dataSourceRuntimeConfig.username.isPresent()) { + NamePrincipal username = new NamePrincipal(dataSourceRuntimeConfig.username.get()); connectionFactoryConfiguration - .principal(new NamePrincipal(dataSourceRuntimeConfig.username.get())); + .principal(username).recoveryPrincipal(username); } if (dataSourceRuntimeConfig.password.isPresent()) { + SimplePassword password = new SimplePassword(dataSourceRuntimeConfig.password.get()); connectionFactoryConfiguration - .credential(new SimplePassword(dataSourceRuntimeConfig.password.get())); + .credential(password).recoveryCredential(password); } // credentials provider diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/StartupBuildSteps.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/StartupBuildSteps.java index 3fc42d9158fcd..4e08d4f01c182 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/StartupBuildSteps.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/StartupBuildSteps.java @@ -1,12 +1,20 @@ package io.quarkus.arc.deployment; +import java.lang.reflect.Modifier; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + import javax.enterprise.context.spi.Contextual; import javax.enterprise.context.spi.CreationalContext; +import javax.enterprise.inject.spi.ObserverMethod; import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.AnnotationTarget; import org.jboss.jandex.AnnotationTarget.Kind; import org.jboss.jandex.AnnotationValue; import org.jboss.jandex.DotName; +import org.jboss.jandex.MethodInfo; import io.quarkus.arc.Arc; import io.quarkus.arc.ArcContainer; @@ -19,6 +27,7 @@ import io.quarkus.arc.processor.BeanInfo; import 
io.quarkus.arc.processor.BuildExtension; import io.quarkus.arc.processor.BuiltinScope; +import io.quarkus.arc.processor.DotNames; import io.quarkus.arc.processor.ObserverConfigurator; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; @@ -61,33 +70,62 @@ UnremovableBeanBuildItem unremovableBeans() { } @BuildStep - void registerStartupObservers(ObserverRegistrationPhaseBuildItem observerRegistrationPhase, + void registerStartupObservers(ObserverRegistrationPhaseBuildItem observerRegistration, BuildProducer configurators) { - AnnotationStore annotationStore = observerRegistrationPhase.getContext().get(BuildExtension.Key.ANNOTATION_STORE); + AnnotationStore annotationStore = observerRegistration.getContext().get(BuildExtension.Key.ANNOTATION_STORE); - for (BeanInfo bean : observerRegistrationPhase.getContext().beans().withTarget()) { - AnnotationInstance startupAnnotation = annotationStore.getAnnotation(bean.getTarget().get(), STARTUP_NAME); + for (BeanInfo bean : observerRegistration.getContext().beans().withTarget()) { + // First check if the target is annotated with @Startup + // Class for class-based bean, method for producer method, etc. + AnnotationTarget target = bean.getTarget().get(); + AnnotationInstance startupAnnotation = annotationStore.getAnnotation(target, STARTUP_NAME); if (startupAnnotation != null) { - registerStartupObserver(observerRegistrationPhase, bean, startupAnnotation); + String id; + if (target.kind() == Kind.METHOD) { + id = target.asMethod().declaringClass().name() + "#" + target.asMethod().toString(); + } else if (target.kind() == Kind.FIELD) { + id = target.asField().declaringClass().name() + "#" + target.asField().toString(); + } else { + id = target.asClass().name().toString(); + } + AnnotationValue priority = startupAnnotation.value(); + registerStartupObserver(observerRegistration, bean, id, + priority != null ? 
priority.asInt() : ObserverMethod.DEFAULT_PRIORITY, null); + } + + List startupMethods = Collections.emptyList(); + if (target.kind() == Kind.CLASS) { + // If the target is a class then collect all non-static non-producer no-args methods annotated with @Startup + startupMethods = new ArrayList<>(); + for (MethodInfo method : target.asClass().methods()) { + if (!method.isSynthetic() + && !Modifier.isStatic(method.flags()) + && method.parametersCount() == 0 + && annotationStore.hasAnnotation(method, STARTUP_NAME) + && !annotationStore.hasAnnotation(method, DotNames.PRODUCES)) { + startupMethods.add(method); + } + } + } + if (!startupMethods.isEmpty()) { + for (MethodInfo method : startupMethods) { + AnnotationValue priority = annotationStore.getAnnotation(method, STARTUP_NAME).value(); + registerStartupObserver(observerRegistration, bean, + method.declaringClass().name() + "#" + method.toString(), + priority != null ? priority.asInt() : ObserverMethod.DEFAULT_PRIORITY, method); + } } } } - private void registerStartupObserver(ObserverRegistrationPhaseBuildItem observerRegistrationPhase, BeanInfo bean, - AnnotationInstance startup) { - ObserverConfigurator configurator = observerRegistrationPhase.getContext().configure() + private void registerStartupObserver(ObserverRegistrationPhaseBuildItem observerRegistration, BeanInfo bean, String id, + int priority, MethodInfo startupMethod) { + ObserverConfigurator configurator = observerRegistration.getContext().configure() .beanClass(bean.getBeanClass()) .observedType(StartupEvent.class); - if (startup.target().kind() == Kind.METHOD) { - configurator.id(startup.target().asMethod().toString()); - } else if (startup.target().kind() == Kind.FIELD) { - configurator.id(startup.target().asField().name()); - } - AnnotationValue priority = startup.value(); - if (priority != null) { - configurator.priority(priority.asInt()); - } + configurator.id(id); + configurator.priority(priority); configurator.notify(mc -> { // InjectableBean 
bean = Arc.container().bean("bflmpsvz"); ResultHandle containerHandle = mc.invokeStaticMethod(ARC_CONTAINER); @@ -95,24 +133,29 @@ private void registerStartupObserver(ObserverRegistrationPhaseBuildItem observer mc.load(bean.getIdentifier())); if (BuiltinScope.DEPENDENT.is(bean.getScope())) { // It does not make a lot of sense to support @Startup dependent beans but it's still a valid use case - ResultHandle contextHandle = mc.newInstance( + ResultHandle creationalContext = mc.newInstance( MethodDescriptor.ofConstructor(CreationalContextImpl.class, Contextual.class), beanHandle); // Create a dependent instance - ResultHandle instanceHandle = mc.invokeInterfaceMethod(CONTEXTUAL_CREATE, beanHandle, - contextHandle); + ResultHandle instance = mc.invokeInterfaceMethod(CONTEXTUAL_CREATE, beanHandle, + creationalContext); + if (startupMethod != null) { + mc.invokeVirtualMethod(MethodDescriptor.of(startupMethod), instance); + } // But destroy the instance immediately - mc.invokeInterfaceMethod(CONTEXTUAL_DESTROY, beanHandle, instanceHandle, contextHandle); + mc.invokeInterfaceMethod(CONTEXTUAL_DESTROY, beanHandle, instance, creationalContext); } else { // Obtains the instance from the context // InstanceHandle handle = Arc.container().instance(bean); ResultHandle instanceHandle = mc.invokeInterfaceMethod(ARC_CONTAINER_INSTANCE, containerHandle, beanHandle); - if (bean.getScope().isNormal()) { + ResultHandle instance = mc.invokeInterfaceMethod(INSTANCE_HANDLE_GET, instanceHandle); + if (startupMethod != null) { + mc.invokeVirtualMethod(MethodDescriptor.of(startupMethod), instance); + } else if (bean.getScope().isNormal()) { // We need to unwrap the client proxy // ((ClientProxy) handle.get()).arc_contextualInstance(); - ResultHandle proxyHandle = mc.checkCast( - mc.invokeInterfaceMethod(INSTANCE_HANDLE_GET, instanceHandle), ClientProxy.class); + ResultHandle proxyHandle = mc.checkCast(instance, ClientProxy.class); mc.invokeInterfaceMethod(CLIENT_PROXY_CONTEXTUAL_INSTANCE, 
proxyHandle); } } diff --git a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/startup/StartupAnnotationTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/startup/StartupAnnotationTest.java index b8309310c7e12..85b099dbd5f62 100644 --- a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/startup/StartupAnnotationTest.java +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/startup/StartupAnnotationTest.java @@ -69,8 +69,8 @@ public void transform(TransformationContext context) { @Test public void testStartup() { - // StartMe, SingletonStartMe, ProducerStartMe, DependentStartMe - assertEquals(14, LOG.size(), "Unexpected number of log messages: " + LOG); + // StartMe, SingletonStartMe, ProducerStartMe, StartupMethods, DependentStartMe + assertEquals(17, LOG.size(), "Unexpected number of log messages: " + LOG); assertEquals("startMe_c", LOG.get(0)); assertEquals("startMe_c", LOG.get(1)); assertEquals("startMe_pc", LOG.get(2)); @@ -82,9 +82,12 @@ public void testStartup() { assertEquals("producer_pc", LOG.get(8)); assertEquals("produce_string", LOG.get(9)); assertEquals("producer_pd", LOG.get(10)); - assertEquals("dependent_c", LOG.get(11)); - assertEquals("dependent_pc", LOG.get(12)); - assertEquals("dependent_pd", LOG.get(13)); + assertEquals("startup_pc", LOG.get(11)); + assertEquals("startup_first", LOG.get(12)); + assertEquals("startup_second", LOG.get(13)); + assertEquals("dependent_c", LOG.get(14)); + assertEquals("dependent_pc", LOG.get(15)); + assertEquals("dependent_pd", LOG.get(16)); } // This component should be started first @@ -109,7 +112,7 @@ void destroy() { } - // @Startup is added by an annotation transformer + // @Startup is added by an annotation transformer, the priority is ObserverMethod.DEFAULT_PRIORITY @Unremovable // only classes annotated with @Startup are made unremovable @Singleton static class SingletonStartMe { @@ -152,14 +155,14 @@ void destroy() { static class ProducerStartMe { - 
@Startup(Integer.MAX_VALUE - 1) + @Startup(Integer.MAX_VALUE - 10) @Produces String produceString() { LOG.add("produce_string"); return "ok"; } - @Startup(Integer.MAX_VALUE - 2) + @Startup(Integer.MAX_VALUE - 20) @Produces Long produceLong() { LOG.add("produce_long"); @@ -178,4 +181,26 @@ void destroy() { } + @Singleton + @Unremovable // only classes annotated with @Startup are made unremovable + static class StartupMethods { + + @Startup(Integer.MAX_VALUE - 2) + String first() { + LOG.add("startup_first"); + return "ok"; + } + + @Startup(Integer.MAX_VALUE - 1) + void second() { + LOG.add("startup_second"); + } + + @PostConstruct + void init() { + LOG.add("startup_pc"); + } + + } + } diff --git a/extensions/funqy/funqy-knative-events/deployment/src/test/java/io/quarkus/funqy/test/RequestScopeLeakTest.java b/extensions/funqy/funqy-knative-events/deployment/src/test/java/io/quarkus/funqy/test/RequestScopeLeakTest.java new file mode 100644 index 0000000000000..2c2c8f62b7e8a --- /dev/null +++ b/extensions/funqy/funqy-knative-events/deployment/src/test/java/io/quarkus/funqy/test/RequestScopeLeakTest.java @@ -0,0 +1,151 @@ +package io.quarkus.funqy.test; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +import java.util.concurrent.atomic.AtomicInteger; + +import javax.annotation.PreDestroy; +import javax.enterprise.context.RequestScoped; +import javax.inject.Inject; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.funqy.Funq; +import io.quarkus.test.QuarkusUnitTest; +import io.restassured.RestAssured; +import io.smallrye.common.vertx.VertxContext; +import io.smallrye.mutiny.Uni; +import io.vertx.core.Context; +import io.vertx.core.Vertx; + +public class RequestScopeLeakTest { + + @RegisterExtension + static QuarkusUnitTest test = new QuarkusUnitTest() + 
.withApplicationRoot((jar) -> jar + .addClasses(MyBean.class, Identity.class, Greeting.class, MyFunction.class) + .addAsResource("greeting-uni.properties", "application.properties")); + + @BeforeEach + void cleanup() { + MyBean.DISPOSED.set(0); + } + + @Test + public void testRequestScope() { + RestAssured.given().contentType("application/json") + .body("{\"name\": \"Roxanne\"}") + .post("/") + .then().statusCode(200) + .header("ce-id", nullValue()) + .body("name", equalTo("Roxanne")) + .body("message", equalTo("Hello Roxanne!")); + + Assertions.assertEquals(1, MyBean.DISPOSED.get()); + } + + @Test + public void testRequestScopeWithSyncFailure() { + RestAssured.given().contentType("application/json") + .body("{\"name\": \"sync-failure\"}") + .post("/") + .then().statusCode(500); + Assertions.assertEquals(1, MyBean.DISPOSED.get()); + } + + @Test + public void testRequestScopeWithSyncFailureInPipeline() { + RestAssured.given().contentType("application/json") + .body("{\"name\": \"sync-failure-pipeline\"}") + .post("/") + .then().statusCode(500); + Assertions.assertEquals(1, MyBean.DISPOSED.get()); + } + + @Test + public void testRequestScopeWithASyncFailure() { + RestAssured.given().contentType("application/json") + .body("{\"name\": \"async-failure\"}") + .post("/") + .then().statusCode(500); + Assertions.assertEquals(1, MyBean.DISPOSED.get()); + } + + @RequestScoped + public static class MyBean { + public static AtomicInteger DISPOSED = new AtomicInteger(); + + private final AtomicInteger counter = new AtomicInteger(); + + public int inc() { + return counter.getAndIncrement(); + } + + public void get() { + counter.get(); + } + + @PreDestroy + public void destroy() { + DISPOSED.incrementAndGet(); + } + } + + public static class MyFunction { + + @Inject + MyBean bean; + @Inject + Vertx vertx; + + @Funq + public Uni greeting(Identity name) { + Context context = Vertx.currentContext(); + Assertions.assertTrue(VertxContext.isOnDuplicatedContext()); + + if 
(name.getName().equals("sync-failure")) { + Assertions.assertEquals(0, bean.inc()); + throw new IllegalArgumentException("expected sync-failure"); + } + + return Uni.createFrom().item("Hello " + name.getName() + "!") + .invoke(() -> { + Assertions.assertEquals(0, bean.inc()); + Assertions.assertSame(context, Vertx.currentContext()); + }) + .chain(this::nap) + .invoke(() -> { + Assertions.assertEquals(1, bean.inc()); + Assertions.assertSame(context, Vertx.currentContext()); + }) + .invoke(() -> { + if (name.getName().equals("sync-failure-pipeline")) { + throw new IllegalArgumentException("expected sync-failure-in-pipeline"); + } + }) + .map(s -> { + Greeting greeting = new Greeting(); + greeting.setName(name.getName()); + greeting.setMessage(s); + return greeting; + }) + .chain(greeting -> { + if (greeting.getName().equals("async-failure")) { + return Uni.createFrom().failure(() -> new IllegalArgumentException("expected async-failure")); + } + return Uni.createFrom().item(greeting); + }); + } + + public Uni nap(String s) { + return Uni.createFrom().emitter(e -> { + vertx.setTimer(100, x -> e.complete(s)); + }); + } + + } +} diff --git a/extensions/funqy/funqy-knative-events/deployment/src/test/java/io/quarkus/funqy/test/RequestScopeTest.java b/extensions/funqy/funqy-knative-events/deployment/src/test/java/io/quarkus/funqy/test/RequestScopeTest.java new file mode 100644 index 0000000000000..01d51b8b9b033 --- /dev/null +++ b/extensions/funqy/funqy-knative-events/deployment/src/test/java/io/quarkus/funqy/test/RequestScopeTest.java @@ -0,0 +1,103 @@ +package io.quarkus.funqy.test; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +import java.util.concurrent.atomic.AtomicInteger; + +import javax.annotation.PreDestroy; +import javax.enterprise.context.RequestScoped; +import javax.inject.Inject; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import 
org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.funqy.Funq; +import io.quarkus.test.QuarkusUnitTest; +import io.restassured.RestAssured; +import io.smallrye.common.vertx.VertxContext; + +public class RequestScopeTest { + + @RegisterExtension + static QuarkusUnitTest test = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addClasses(MyBean.class, Identity.class, Greeting.class, MyFunction.class) + .addAsResource("greeting.properties", "application.properties")); + + @BeforeEach + void cleanup() { + MyBean.DISPOSED.set(0); + } + + @Test + public void testRequestScope() { + RestAssured.given().contentType("application/json") + .body("{\"name\": \"Roxanne\"}") + .post("/") + .then().statusCode(200) + .header("ce-id", nullValue()) + .body("name", equalTo("Roxanne")) + .body("message", equalTo("Hello Roxanne!")); + + Assertions.assertEquals(1, MyBean.DISPOSED.get()); + } + + @Test + public void testRequestScopeTerminationWithSynchronousFailure() { + String body = RestAssured.given().contentType("application/json") + .body("{\"name\": \"failure\"}") + .post("/") + .then().statusCode(500).extract().asString(); + + Assertions.assertTrue(body.contains("expected failure")); + Assertions.assertEquals(1, MyBean.DISPOSED.get()); + } + + @RequestScoped + public static class MyBean { + + public static AtomicInteger DISPOSED = new AtomicInteger(); + + private final AtomicInteger counter = new AtomicInteger(); + + public int inc() { + return counter.getAndIncrement(); + } + + public void get() { + counter.get(); + } + + @PreDestroy + public void destroy() { + DISPOSED.incrementAndGet(); + } + } + + public static class MyFunction { + + @Inject + MyBean bean; + + @Funq + public Greeting greet(Identity name) { + Assertions.assertTrue(VertxContext.isOnDuplicatedContext()); + Assertions.assertEquals(0, bean.inc()); + + if (name.getName().equals("failure")) { + throw new IllegalArgumentException("expected failure"); + } + + Greeting greeting = new 
Greeting(); + greeting.setName(name.getName()); + greeting.setMessage("Hello " + name.getName() + "!"); + + Assertions.assertEquals(1, bean.inc()); + return greeting; + } + + } +} diff --git a/extensions/funqy/funqy-knative-events/deployment/src/test/resources/greeting-uni.properties b/extensions/funqy/funqy-knative-events/deployment/src/test/resources/greeting-uni.properties new file mode 100644 index 0000000000000..ed2792cb11d1d --- /dev/null +++ b/extensions/funqy/funqy-knative-events/deployment/src/test/resources/greeting-uni.properties @@ -0,0 +1 @@ +quarkus.funqy.export=greeting \ No newline at end of file diff --git a/extensions/funqy/funqy-knative-events/runtime/src/main/java/io/quarkus/funqy/runtime/bindings/knative/events/VertxRequestHandler.java b/extensions/funqy/funqy-knative-events/runtime/src/main/java/io/quarkus/funqy/runtime/bindings/knative/events/VertxRequestHandler.java index f9d87c003ef05..eb172090110f0 100644 --- a/extensions/funqy/funqy-knative-events/runtime/src/main/java/io/quarkus/funqy/runtime/bindings/knative/events/VertxRequestHandler.java +++ b/extensions/funqy/funqy-knative-events/runtime/src/main/java/io/quarkus/funqy/runtime/bindings/knative/events/VertxRequestHandler.java @@ -562,19 +562,25 @@ private FunqyServerResponse dispatch(CloudEvent event, RoutingContext routingCon } } currentVertxRequest.setCurrent(routingContext); - try { - RequestContextImpl funqContext = new RequestContextImpl(); - if (event != null) { - funqContext.setContextData(CloudEvent.class, event); - } - FunqyRequestImpl funqyRequest = new FunqyRequestImpl(funqContext, input); - FunqyResponseImpl funqyResponse = new FunqyResponseImpl(); - invoker.invoke(funqyRequest, funqyResponse); - return funqyResponse; - } finally { - if (requestContext.isActive()) { - requestContext.terminate(); - } + RequestContextImpl funqContext = new RequestContextImpl(); + if (event != null) { + funqContext.setContextData(CloudEvent.class, event); } + FunqyRequestImpl funqyRequest = 
new FunqyRequestImpl(funqContext, input); + FunqyResponseImpl funqyResponse = new FunqyResponseImpl(); + invoker.invoke(funqyRequest, funqyResponse); + + // The invoker set the output, but we need to extend that output (a Uni) with a termination block deactivating the + // request context if activated. + funqyResponse.setOutput(funqyResponse.getOutput() + .onTermination().invoke(new Runnable() { + @Override + public void run() { + if (requestContext.isActive()) { + requestContext.terminate(); + } + } + })); + return funqyResponse; } } diff --git a/extensions/funqy/funqy-server-common/runtime/src/main/java/io/quarkus/funqy/runtime/FunctionInvoker.java b/extensions/funqy/funqy-server-common/runtime/src/main/java/io/quarkus/funqy/runtime/FunctionInvoker.java index aeff7760bf343..e101cd63b93fc 100644 --- a/extensions/funqy/funqy-server-common/runtime/src/main/java/io/quarkus/funqy/runtime/FunctionInvoker.java +++ b/extensions/funqy/funqy-server-common/runtime/src/main/java/io/quarkus/funqy/runtime/FunctionInvoker.java @@ -122,22 +122,22 @@ public void invoke(FunqyServerRequest request, FunqyServerResponse response) { try { Object result = method.invoke(target, args); if (isAsync()) { - response.setOutput(((Uni) result).onFailure().transform(t -> new ApplicationException(t))); + response.setOutput(((Uni) result) + .onFailure().transform(t -> new ApplicationException(t))); } else { response.setOutput(Uni.createFrom().item(result)); } + // Catch the exception but do not rethrow the exception, + // The handler decorates the uni with a termination block to handle the request scope deactivation. 
} catch (IllegalAccessException e) { InternalError ex = new InternalError("Failed to invoke function", e); response.setOutput(Uni.createFrom().failure(ex)); - throw ex; } catch (InvocationTargetException e) { ApplicationException ex = new ApplicationException(e.getCause()); response.setOutput(Uni.createFrom().failure(ex)); - throw ex; } catch (Throwable t) { InternalError ex = new InternalError(t); response.setOutput(Uni.createFrom().failure(ex)); - throw ex; } } diff --git a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientProducer.java b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientProducer.java index 7b8c52d3ffbf4..f59c6dbd49ac9 100644 --- a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientProducer.java +++ b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientProducer.java @@ -25,6 +25,7 @@ import org.infinispan.client.hotrod.impl.ConfigurationProperties; import org.infinispan.client.hotrod.logging.Log; import org.infinispan.client.hotrod.logging.LogFactory; +import org.infinispan.commons.configuration.XMLStringConfiguration; import org.infinispan.commons.marshall.Marshaller; import org.infinispan.commons.marshall.ProtoStreamMarshaller; import org.infinispan.commons.util.Util; @@ -43,6 +44,7 @@ public class InfinispanClientProducer { private static final Log log = LogFactory.getLog(InfinispanClientProducer.class); + public static final String DEFAULT_CONFIG = ""; public static final String PROTOBUF_FILE_PREFIX = "infinispan.client.hotrod.protofile."; public static final String PROTOBUF_INITIALIZERS = "infinispan.client.hotrod.proto-initializers"; @@ -324,20 +326,33 @@ public RemoteCache getRemoteCache(InjectionPoint injectionPoint, Re final io.quarkus.infinispan.client.Remote remote = getRemoteAnnotation(annotationSet); if (cacheManager != null 
&& remote != null && !remote.value().isEmpty()) { - return cacheManager.getCache(remote.value()); + RemoteCache cache = cacheManager.getCache(remote.value()); + if (cache == null) { + log.warn("Attempt to create cache using minimal default config"); + return cacheManager.administration() + .getOrCreateCache(remote.value(), new XMLStringConfiguration(DEFAULT_CONFIG)); + } + return cache; } if (cacheManager != null) { - return cacheManager.getCache(); + RemoteCache cache = cacheManager.getCache(); + if (cache == null) { + log.warn("Attempt to create cache using minimal default config"); + return cacheManager.administration() + .getOrCreateCache(remote.value(), new XMLStringConfiguration(DEFAULT_CONFIG)); + } + return cache; } + log.error("Unable to produce RemoteCache. RemoteCacheManager is null"); return null; } @Produces - public CounterManager counterManager() { - RemoteCacheManager cacheManager = remoteCacheManager(); + public CounterManager counterManager(RemoteCacheManager cacheManager) { if (cacheManager == null) { + log.error("Unable to produce CounterManager. 
RemoteCacheManager is null"); return null; } return RemoteCounterManagerFactory.asCounterManager(cacheManager); diff --git a/extensions/jdbc/jdbc-db2/deployment/pom.xml b/extensions/jdbc/jdbc-db2/deployment/pom.xml index a412fa6bb6395..a803ad71c492f 100644 --- a/extensions/jdbc/jdbc-db2/deployment/pom.xml +++ b/extensions/jdbc/jdbc-db2/deployment/pom.xml @@ -48,6 +48,11 @@ assertj-core test + + org.eclipse.transformer + org.eclipse.transformer + 0.5.0 + diff --git a/extensions/jdbc/jdbc-db2/deployment/src/main/java/io/quarkus/jdbc/db2/deployment/JakartaEnablement.java b/extensions/jdbc/jdbc-db2/deployment/src/main/java/io/quarkus/jdbc/db2/deployment/JakartaEnablement.java new file mode 100644 index 0000000000000..c74ed6b81b965 --- /dev/null +++ b/extensions/jdbc/jdbc-db2/deployment/src/main/java/io/quarkus/jdbc/db2/deployment/JakartaEnablement.java @@ -0,0 +1,82 @@ +package io.quarkus.jdbc.db2.deployment; + +import java.nio.ByteBuffer; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.eclipse.transformer.action.ActionContext; +import org.eclipse.transformer.action.ByteData; +import org.eclipse.transformer.action.impl.ActionContextImpl; +import org.eclipse.transformer.action.impl.ByteDataImpl; +import org.eclipse.transformer.action.impl.ClassActionImpl; +import org.eclipse.transformer.action.impl.SelectionRuleImpl; +import org.eclipse.transformer.action.impl.SignatureRuleImpl; +import org.eclipse.transformer.util.FileUtils; +import org.objectweb.asm.ClassReader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import io.quarkus.bootstrap.classloading.QuarkusClassLoader; +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.builditem.BytecodeTransformerBuildItem; + +/** + * The DB2 driver is compiled using references to classes in the javax.transaction + * package; we need to transform these to fix 
compatibility with jakarta.transaction. + * We do this by leveraging the Eclipse Transformer project during Augmentation, so + * that end users don't need to bother. + */ +public class JakartaEnablement { + + private static final List CLASSES_NEEDING_TRANSFORMATION = List.of( + "com.ibm.db2.jcc.t2zos.ab", + "com.ibm.db2.jcc.t2zos.T2zosConnection", + "com.ibm.db2.jcc.t2zos.T2zosConfiguration"); + + @BuildStep + void transformToJakarta(BuildProducer transformers) { + if (QuarkusClassLoader.isClassPresentAtRuntime("jakarta.transaction.Transaction")) { + JakartaTransformer tr = new JakartaTransformer(); + for (String classname : CLASSES_NEEDING_TRANSFORMATION) { + final BytecodeTransformerBuildItem item = new BytecodeTransformerBuildItem.Builder() + .setCacheable(true) + .setContinueOnFailure(false) + .setClassToTransform(classname) + .setClassReaderOptions(ClassReader.SKIP_DEBUG) + .setInputTransformer(tr::transform) + .build(); + transformers.produce(item); + } + } + } + + private static class JakartaTransformer { + + private final Logger logger; + private final ActionContext ctx; + + JakartaTransformer() { + logger = LoggerFactory.getLogger("JakartaTransformer"); + Map renames = new HashMap<>(); + //N.B. we enable only this transformation, not the full set of capabilities of Eclipse Transformer; + //this might need tailoring if the same idea gets applied to a different context. 
+ renames.put("javax.transaction", "jakarta.transaction"); + ctx = new ActionContextImpl(logger, + new SelectionRuleImpl(logger, Collections.emptyMap(), Collections.emptyMap()), + new SignatureRuleImpl(logger, renames, null, null, null, null, null, Collections.emptyMap())); + } + + byte[] transform(final String name, final byte[] bytes) { + logger.info("Jakarta EE compatibility enhancer for Quarkus: transforming " + name); + final ClassActionImpl classTransformer = new ClassActionImpl(ctx); + final ByteBuffer input = ByteBuffer.wrap(bytes); + final ByteData inputData = new ByteDataImpl(name, input, FileUtils.DEFAULT_CHARSET); + final ByteData outputData = classTransformer.apply(inputData); + return outputData.buffer().array(); + } + } + +} diff --git a/extensions/kafka-client/deployment/pom.xml b/extensions/kafka-client/deployment/pom.xml index c1312bd48e115..a7da8dfa0277f 100644 --- a/extensions/kafka-client/deployment/pom.xml +++ b/extensions/kafka-client/deployment/pom.xml @@ -44,6 +44,10 @@ io.quarkus quarkus-caffeine-deployment + + io.quarkus + quarkus-vertx-http-dev-console-spi + org.testcontainers testcontainers diff --git a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaBuildTimeConfig.java b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaBuildTimeConfig.java index b975b460e869b..420d8bd85a453 100644 --- a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaBuildTimeConfig.java +++ b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaBuildTimeConfig.java @@ -28,4 +28,5 @@ public class KafkaBuildTimeConfig { */ @ConfigItem public KafkaDevServicesBuildTimeConfig devservices; + } diff --git a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaProcessor.java b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaProcessor.java index 
41fb178c3ea4a..c29791d421569 100644 --- a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaProcessor.java +++ b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaProcessor.java @@ -60,6 +60,7 @@ import io.quarkus.deployment.Capabilities; import io.quarkus.deployment.Capability; import io.quarkus.deployment.Feature; +import io.quarkus.deployment.IsDevelopment; import io.quarkus.deployment.IsNormal; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; @@ -71,6 +72,7 @@ import io.quarkus.deployment.builditem.ExtensionSslNativeSupportBuildItem; import io.quarkus.deployment.builditem.FeatureBuildItem; import io.quarkus.deployment.builditem.IndexDependencyBuildItem; +import io.quarkus.deployment.builditem.LaunchModeBuildItem; import io.quarkus.deployment.builditem.LogCategoryBuildItem; import io.quarkus.deployment.builditem.RunTimeConfigurationDefaultBuildItem; import io.quarkus.deployment.builditem.RuntimeConfigSetupCompleteBuildItem; @@ -82,9 +84,14 @@ import io.quarkus.deployment.builditem.nativeimage.ServiceProviderBuildItem; import io.quarkus.deployment.logging.LogCleanupFilterBuildItem; import io.quarkus.deployment.pkg.NativeConfig; -import io.quarkus.kafka.client.runtime.KafkaBindingConverter; -import io.quarkus.kafka.client.runtime.KafkaRecorder; +import io.quarkus.dev.spi.DevModeType; +import io.quarkus.devconsole.spi.DevConsoleRouteBuildItem; +import io.quarkus.devconsole.spi.DevConsoleWebjarBuildItem; +import io.quarkus.kafka.client.runtime.*; import io.quarkus.kafka.client.runtime.KafkaRuntimeConfigProducer; +import io.quarkus.kafka.client.runtime.ui.KafkaTopicClient; +import io.quarkus.kafka.client.runtime.ui.KafkaUiRecorder; +import io.quarkus.kafka.client.runtime.ui.KafkaUiUtils; import io.quarkus.kafka.client.serialization.BufferDeserializer; import io.quarkus.kafka.client.serialization.BufferSerializer; import 
io.quarkus.kafka.client.serialization.JsonArrayDeserializer; @@ -95,6 +102,7 @@ import io.quarkus.kafka.client.serialization.JsonbSerializer; import io.quarkus.kafka.client.serialization.ObjectMapperDeserializer; import io.quarkus.kafka.client.serialization.ObjectMapperSerializer; +import io.quarkus.maven.dependency.GACT; import io.quarkus.smallrye.health.deployment.spi.HealthBuildItem; public class KafkaProcessor { @@ -144,6 +152,11 @@ public class KafkaProcessor { static final DotName PARTITION_ASSIGNER = DotName .createSimple("org.apache.kafka.clients.consumer.internals.PartitionAssignor"); + private static final GACT DEVCONSOLE_WEBJAR_ARTIFACT_KEY = new GACT("io.quarkus", + "quarkus-kafka-client-deployment", null, "jar"); + private static final String DEVCONSOLE_WEBJAR_STATIC_RESOURCES_PATH = "dev-static/"; + public static final String KAFKA_ADMIN_PATH = "kafka-admin"; + public static final String KAFKA_RESOURCES_ROOT_PATH = "kafka-ui"; @BuildStep FeatureBuildItem feature() { @@ -165,7 +178,8 @@ void silenceUnwantedConfigLogs(BuildProducer logClean List ignoredMessages = new ArrayList<>(); for (String ignoredConfigProperty : ignoredConfigProperties) { - ignoredMessages.add("The configuration '" + ignoredConfigProperty + "' was supplied but isn't a known config."); + ignoredMessages + .add("The configuration '" + ignoredConfigProperty + "' was supplied but isn't a known config."); } logCleanupFilters.produce(new LogCleanupFilterBuildItem("org.apache.kafka.clients.consumer.ConsumerConfig", @@ -478,4 +492,41 @@ void registerServiceBinding(Capabilities capabilities, KafkaBindingConverter.class.getName())); } } + + // Kafka UI related stuff + + @BuildStep + public AdditionalBeanBuildItem kafkaClientBeans() { + return AdditionalBeanBuildItem.builder() + .addBeanClass(KafkaAdminClient.class) + .addBeanClass(KafkaTopicClient.class) + .addBeanClass(KafkaUiUtils.class) + .setUnremovable() + .build(); + } + + @BuildStep(onlyIf = IsDevelopment.class) + 
@Record(ExecutionTime.RUNTIME_INIT) + public void registerKafkaUiExecHandler( + BuildProducer routeProducer, + KafkaUiRecorder recorder) { + routeProducer.produce(DevConsoleRouteBuildItem.builder() + .method("POST") + .handler(recorder.kafkaControlHandler()) + .path(KAFKA_ADMIN_PATH) + .bodyHandlerRequired() + .build()); + } + + @BuildStep(onlyIf = IsDevelopment.class) + public DevConsoleWebjarBuildItem setupWebJar(LaunchModeBuildItem launchModeBuildItem) { + if (launchModeBuildItem.getDevModeType().orElse(null) != DevModeType.LOCAL) { + return null; + } + return DevConsoleWebjarBuildItem.builder().artifactKey(DEVCONSOLE_WEBJAR_ARTIFACT_KEY) + .root(DEVCONSOLE_WEBJAR_STATIC_RESOURCES_PATH) + .routeRoot(KAFKA_RESOURCES_ROOT_PATH) + .build(); + } + } diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-static/js/config.js b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/config.js new file mode 100644 index 0000000000000..130b130828fbe --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/config.js @@ -0,0 +1,2 @@ +export const api = '/q/dev/io.quarkus.quarkus-kafka-client/kafka-admin'; +export const ui = 'kafka-ui'; \ No newline at end of file diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-static/js/kafka_ui.js b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/kafka_ui.js new file mode 100644 index 0000000000000..36667854c1944 --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/kafka_ui.js @@ -0,0 +1,9 @@ +import Navigator from './pages/navigator.js' + +const navigator = new Navigator(); +$(document).ready( + () => { + navigator.navigateToDefaultPage(); + } +); + diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/accessControlListPage.js b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/accessControlListPage.js new file mode 100644 index 
0000000000000..bf10bd05b5e66 --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/accessControlListPage.js @@ -0,0 +1,49 @@ +import {doPost, errorPopUp} from "../web/web.js"; +import {createTableItem} from "../util/contentManagement.js"; +import {toggleSpinner} from "../util/spinner.js"; + +export default class AccessControlListPage { + constructor(containerId) { + this.containerId = containerId; + Object.getOwnPropertyNames(AccessControlListPage.prototype).forEach((key) => { + if (key !== 'constructor') { + this[key] = this[key].bind(this); + } + }); + } + + + open() { + const req = { + action: "getAclInfo" + }; + toggleSpinner(this.containerId); + doPost(req, (data) => { + setTimeout(() => { + this.updateInfo(data); + toggleSpinner(this.containerId); + }, 2000); + }, data => { + errorPopUp("Error getting Kafka ACL info: ", data); + }); + } + + updateInfo(data) { + $('#acluster-id').html(data.clusterId); + $('#acluster-controller').html(data.broker); + $('#acluster-acl').html(data.aclOperations); + + const acls = data.entries; + let aclTable = $('#acl-table tbody'); + aclTable.empty(); + for (let i = 0; i < acls.length; i++) { + const e = acls[i]; + let tableRow = $(""); + tableRow.append(createTableItem(e.operation)); + tableRow.append(createTableItem(e.prinipal)); + tableRow.append(createTableItem(e.perm)); + tableRow.append(createTableItem(e.pattern)); + aclTable.append(tableRow); + } + } +} \ No newline at end of file diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/consumerGroupDetailsPage.js b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/consumerGroupDetailsPage.js new file mode 100644 index 0000000000000..3766588ce9292 --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/consumerGroupDetailsPage.js @@ -0,0 +1,85 @@ +import {CollapseRow, createTableHead, createTableItem, createTableItemHtml} from 
"../util/contentManagement.js"; + +export default class ConsumerGroupDetailsPage { + constructor(containerId) { + this.containerId = containerId; + Object.getOwnPropertyNames(ConsumerGroupDetailsPage.prototype).forEach((key) => { + if (key !== 'constructor') { + this[key] = this[key].bind(this); + } + }); + } + + open(params) { + const membersData = params[1]; + let consumerGroupsTable = $('#consumer-group-details-table tbody'); + consumerGroupsTable.empty(); + for (let i = 0; i < membersData.length; i++) { + const d = membersData[i]; + const groupId = "group-" + d.memberId; + + let tableRow = $(""); + let collapseRow; + if (d.partitions.length > 0) { + collapseRow = new CollapseRow(groupId); + tableRow.append(createTableItemHtml(collapseRow.arrow)); + } else { + tableRow.append(createTableItem("")); + } + + const memberId = $("") + .text(d.clientId); + const id = d.memberId.substring(d.clientId.length); + const text = $("

    ") + .append(memberId) + .append(id); + tableRow.append(createTableItemHtml(text)); + tableRow.append(createTableItem(d.host)); + tableRow.append(createTableItem("" + new Set(d.partitions.map(x => x.partition)).size)); + tableRow.append(createTableItem("" + d.partitions.map(x => x.lag).reduce((l, r) => l + r, 0))); + + if (d.partitions.length > 0) { + const content = this.createConsumerGroupCollapseInfo(d); + tableRow.addClass("pointer") + tableRow.click(() => collapseRow.collapse()); + consumerGroupsTable.append(tableRow); + consumerGroupsTable.append(collapseRow + .getCollapseContent(tableRow.children().length, content) + .addClass("no-hover")); + } else { + consumerGroupsTable.append(tableRow); + } + } + } + + createConsumerGroupCollapseInfo(dataItem) { + const collapseContent = $("") + .addClass("table") + .addClass("table-sm") + .addClass("no-hover"); + + const headers = $("") + .addClass("no-hover") + .append(createTableHead("Topic")) + .append(createTableHead("Partition")) + .append(createTableHead("Lag")); + const head = $("") + .append(headers); + + const body = $(""); + for (let partition of dataItem.partitions) { + const row = $("") + .addClass("no-hover"); + row.append(createTableItemHtml(partition.topic)) + row.append(createTableItemHtml(partition.partition)) + row.append(createTableItemHtml(partition.lag)) + body.append(row); + } + + collapseContent.append(head); + collapseContent.append(body); + + return collapseContent; + } + +} \ No newline at end of file diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/consumerGroupPage.js b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/consumerGroupPage.js new file mode 100644 index 0000000000000..51928af67a7e3 --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/consumerGroupPage.js @@ -0,0 +1,47 @@ +import {createTableItem} from "../util/contentManagement.js"; +import {doPost, errorPopUp} from 
"../web/web.js"; +import {pages} from "./navigator.js"; +import {toggleSpinner} from "../util/spinner.js"; + +export default class ConsumerGroupPage { + constructor(navigator, containerId) { + this.containerId = containerId; + this.navigator = navigator; + Object.getOwnPropertyNames(ConsumerGroupPage.prototype).forEach((key) => { + if (key !== 'constructor') { + this[key] = this[key].bind(this); + } + }); + } + + open() { + toggleSpinner(this.containerId); + const req = { + action: "getInfo", key: "0", value: "0" + }; + doPost(req, (data) => { + this.updateConsumerGroups(data.consumerGroups); + toggleSpinner(this.containerId); + }, data => { + errorPopUp("Error getting Kafka info: ", data); + toggleSpinner(this.containerId); + }); + } + + updateConsumerGroups(data) { + let consumerGroupsTable = $('#consumer-groups-table tbody'); + consumerGroupsTable.empty(); + for (let i = 0; i < data.length; i++) { + const d = data[i]; + let tableRow = $(""); + tableRow.append(createTableItem(d.state)); + tableRow.append(createTableItem(d.name)); + tableRow.append(createTableItem(d.coordinatorId)); + tableRow.append(createTableItem(d.protocol)); + tableRow.append(createTableItem(d.members.length)); + tableRow.append(createTableItem(d.lag)); + tableRow.click(() => this.navigator.navigateTo(pages.CONSUMER_GROUPS_DETAILS, [d.name, d.members])); + consumerGroupsTable.append(tableRow); + } + } +} \ No newline at end of file diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/messagesPage.js b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/messagesPage.js new file mode 100644 index 0000000000000..f009847be3826 --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/messagesPage.js @@ -0,0 +1,449 @@ +import {doPost, errorPopUp} from "../web/web.js"; +import timestampToFormattedString from "../util/datetimeUtil.js"; +import {CollapseRow, createTableItem, createTableItemHtml} from 
"../util/contentManagement.js"; +import {toggleSpinner} from "../util/spinner.js"; + +const MODAL_KEY_TAB = "header-key-tab-pane"; +const PAGE_SIZE = 20; +const NEW_FIRST = "NEW_FIRST"; +const OLD_FIRST = "OLD_FIRST"; +const MESSAGES_SPINNER = "message-load-spinner"; +const MESSAGES_TABLE_BODY = "msg-table-body"; +const MESSAGES_TABLE_HOLDER = "msg-table-holder"; + +export default class MessagesPage { + constructor(containerId) { + this.containerId = containerId; + this.registerButtonHandlers(); + Object.getOwnPropertyNames(MessagesPage.prototype).forEach((key) => { + if (key !== 'constructor') { + this[key] = this[key].bind(this); + } + }); + } + + registerButtonHandlers() { + $("#open-create-msg-modal-btn").click(() => { + $('#create-msg-modal').modal('show'); + this.setActiveTab(MODAL_KEY_TAB); + }); + + $('#send-msg-btn').click(this.createMessage.bind(this)); + + $('.close-modal-btn').click(() => { + $('.modal').modal('hide'); + this.setActiveTab(MODAL_KEY_TAB); + }); + + $('#msg-page-partition-select').multiselect({ + buttonClass: 'thead-multiselect', + includeSelectAllOption: true, + filterPlaceholder: 'Partitions', + selectAllText: 'Select All', + nonSelectedText: 'Partitions', + buttonText: function () { + return 'Partitions'; + } + }); + + $("#timestamp-sort-header").click(() => { + this.toggleSorting(); + window.currentContext.currentPage = 1; + this.loadMessages(); + }); + + $("#msg-page-partition-select").change(() => { + window.currentContext.currentPage = 1; + this.loadMessages(); + }); + + $(".previous").click(() => { + if (window.currentContext.currentPage === 1) return; + window.currentContext.currentPage = window.currentContext.currentPage - 1; + this.loadMessages(); + }) + + $(".next").click(() => { + if (window.currentContext.currentPage === this.getMaxPageNumber()) return; + window.currentContext.currentPage = window.currentContext.currentPage + 1; + this.loadMessages(); + }) + + $("#reload-msg-btn").click(() => { + currentContext.pagesCache = 
new Map(); + this.loadMessages(); + }); + } + + toggleSorting() { + if (currentContext.currentSorting === NEW_FIRST) { + currentContext.currentSorting = OLD_FIRST; + $("#timestamp-sort-icon") + .removeClass("bi-chevron-double-down") + .addClass("bi-chevron-double-up"); + } else { + currentContext.currentSorting = NEW_FIRST; + $("#timestamp-sort-icon") + .addClass("bi-chevron-double-down") + .removeClass("bi-chevron-double-up"); + } + } + + loadMessages() { + toggleSpinner(MESSAGES_TABLE_HOLDER, MESSAGES_SPINNER); + this.getPage(currentContext.currentPage, this.onMessagesLoaded, this.onMessagesFailed); + this.redrawPageNav(); + } + + open(params) { + toggleSpinner(MESSAGES_TABLE_HOLDER, MESSAGES_SPINNER); + const topicName = params[0]; + window.currentContext = { + topicName: topicName, + currentPage: 1, //always start with first page + pagesCache: new Map(), + currentSorting: NEW_FIRST + }; + + this.clearMessageTable(); + + new Promise((resolve, reject) => { + this.requestPartitions(topicName, resolve, reject); + }).then((data) => { + this.onPartitionsLoaded(data); + return new Promise((resolve) => { + setTimeout(() => { + resolve(); + }, 1000); + }); + }).then(() => { + this.loadMaxPageNumber(); + return new Promise((resolve) => { + setTimeout(() => { + resolve(); + }, 1000); + }); + }).then(() => { + this.getPage(currentContext.currentPage, this.onMessagesLoaded, this.onMessagesFailed); + return new Promise((resolve) => { + setTimeout(() => { + resolve(); + }, 1000); + }); + }) + .catch(() => errorPopUp("Failed loading page.")); + } + + // Key format: ORDER-partition1-partition2-...-partitionN-pageNumber. 
Like: NEW_FIRST-0-1-17 + generateCacheKey(pageNumber) { + const order = this.getOrder(); + const partitions = this.getPartitions(); + const partitionsKeyPart = partitions.reduce((partialKey, str) => partialKey + "-" + str, 0); + + return order + partitionsKeyPart + "-" + pageNumber; + } + + requestPartitions(topicName, onPartitionsLoaded, onPartitionsFailed) { + const rq = { + action: "getPartitions", topicName: topicName + } + + doPost(rq, onPartitionsLoaded, onPartitionsFailed); + } + + onPartitionsLoaded(data) { + let msgModalPartitionSelect = $('#msg-modal-partition-select'); + let msgPagePartitionSelect = $('#msg-page-partition-select'); + msgModalPartitionSelect.empty(); + msgPagePartitionSelect.empty(); + + msgModalPartitionSelect.append($(""); + const groupId = "group-" + window.crypto.randomUUID(); + const collapseRow = new CollapseRow(groupId); + tableRow.append(createTableItemHtml(collapseRow.arrow)); + + tableRow.append(createTableItem(messages[i].offset)); + tableRow.append(createTableItem(messages[i].partition)); + tableRow.append(createTableItem(timestampToFormattedString(messages[i].timestamp))); + tableRow.append(createTableItem(messages[i].key)); + + const value = messages[i].value; + const maxMsgLength = 75; + if (value.length < maxMsgLength) { + tableRow.append(createTableItem(value)); + } else { + tableRow.append(createTableItem(value.slice(0, maxMsgLength) + "...")); + } + tableRow.append(createTableItem()); + tableRow + .addClass("pointer") + .click(collapseRow.collapse); + msgTableBody.append(tableRow); + msgTableBody.append(collapseRow.getCollapseContent(tableRow.children().length, this.createMessageCollapseItem(value))); + } + + currentContext.lastOffset = data.partitionOffset; + toggleSpinner(MESSAGES_TABLE_HOLDER, MESSAGES_SPINNER); + } + + createMessageCollapseItem(fullMessage) { + return $("
    ") + .text(fullMessage); + } + + toggleContent() { + return (event) => { + const textBlock = $(event.target); + const dots = textBlock.find(".dots"); + const hiddenText = textBlock.find(".hidden-text"); + + if (dots.hasClass("hidden")) { + dots.removeClass("hidden"); + dots.addClass("text-shown"); + hiddenText.removeClass("text-shown"); + hiddenText.addClass("hidden"); + } else { + dots.removeClass("text-shown"); + dots.addClass("hidden"); + hiddenText.removeClass("hidden"); + hiddenText.addClass("text-shown"); + } + }; + } + + onMessagesFailed(data, errorType, error) { + console.error("Error getting topic messages"); + } + + requestCreateMessage() { + const topicName = currentContext.topicName; + let partition = $('#msg-modal-partition-select option:selected').val(); + if (partition === 'any') partition = null; + + let valueTextarea = $('#msg-value-textarea'); + let keyTextarea = $('#msg-key-textarea'); + const rq = { + action: "createMessage", + topic: topicName, + partition: partition, + value: valueTextarea.val(), + key: keyTextarea.val() + }; + + // TODO: print out partitions count on topics page + doPost(rq, data => { + currentContext.pagesCache = new Map(); + new Promise(this.loadMaxPageNumber) + .then(this.loadMessages) + .catch(() => errorPopUp("Failed")); + }, (data, errorType, error) => { + errorPopUp("Failed to reload messages."); + }); + } + + setActiveTab(tab) { + $('.nav-tabs button[href="#' + tab + '"]').click(); + }; + + createMessage() { + this.requestCreateMessage(); + + // Clean inputs for future reuse of modal. 
+ $('#create-msg-modal').modal('hide'); + $('#msg-value-textarea').val(""); + $('#msg-key-textarea').val(""); + $('#msg-modal-partition-select').val("any"); + $('#msg-modal-type-select').val("text"); + + $('body').removeClass('modal-open'); + $('.modal-backdrop').remove(); + + this.setActiveTab(MODAL_KEY_TAB); + } + + clearMessageTable() { + $('#msg-table-body').empty(); + } + + redrawPageNav() { + //TODO: add GOTO page input + const previous = $(".previous"); + const next = $(".next"); + + previous.removeClass("disabled"); + next.removeClass("disabled"); + + const maxPageNumber = this.getMaxPageNumber(); + const currentPage = currentContext.currentPage; + let pages = [currentPage]; + + if (currentPage > 1) { + pages.unshift(currentPage - 1); + } + if (currentPage < maxPageNumber) { + pages.push(currentPage + 1); + } + + if (currentPage === 1) { + previous.addClass("disabled"); + if (maxPageNumber > 2) { + pages.push(currentPage + 2); + } + } + if (currentPage === maxPageNumber) { + next.addClass("disabled"); + if (maxPageNumber > 2) { + pages.unshift(currentPage - 2); + } + } + + const pagination = $("#msg-pagination"); + + // Remove all page children numbers. + while (pagination.children().length !== 2) { + pagination.children()[1].remove(); + } + + for (const p of pages) { + let a = $("") + .text("" + p) + .addClass("page-link"); + let li = $("
  • ") + .addClass("page-item") + .click(() => { + toggleSpinner(MESSAGES_TABLE_HOLDER, MESSAGES_SPINNER); + currentContext.currentPage = p; + this.getPage(p, this.onMessagesLoaded, this.onMessagesFailed); + this.redrawPageNav(); + }); + + if (p === currentPage) { + li.addClass("active"); + } + li.append(a); + + const lastPosition = pagination.children().length - 1; + li.insertBefore(".next"); + } + } + + requestOffset(topicName, order, onOffsetLoaded, onOffsetFailed, partitions) { + const req = { + action: "getOffset", + topicName: topicName, + order: order, + requestedPartitions: partitions === undefined ? this.getPartitions() : partitions + }; + doPost(req, onOffsetLoaded, onOffsetFailed); + } + + // TODO: add possibility to hide panel on the left + loadMaxPageNumber() { + const partitions = this.getPartitions(); + this.requestOffset( + currentContext.topicName, + NEW_FIRST, + (data) => { + currentContext.partitionOffset = new Map( + Object.entries(data).map(x => [parseInt(x[0]), x[1]]) + ); + this.redrawPageNav(); + }, + (data, errorType, error) => { + console.error("Error getting max page number."); + }, + partitions + ); + } + + getMaxPageNumber() { + const partitions = this.getPartitions(); + const totalElements = partitions.map(x => { + const a = currentContext.partitionOffset.get(x) + return a; + }) + .reduce((partialSum, a) => partialSum + a, 0); + return Math.max(Math.ceil(totalElements / PAGE_SIZE), 1); + } + + getOrder() { + return currentContext.currentSorting; + } + +} \ No newline at end of file diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/navigator.js b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/navigator.js new file mode 100644 index 0000000000000..cd5e66615a4aa --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/navigator.js @@ -0,0 +1,175 @@ +import MessagesPage from "./messagesPage.js"; +import TopicsPage from "./topicsPage.js"; 
+import ConsumerGroupPage from "./consumerGroupPage.js"; +import ConsumerGroupDetailsPage from "./consumerGroupDetailsPage.js"; +import AccessControlListPage from "./accessControlListPage.js"; +import NodesPage from "./nodesPage.js"; +import {createIcon} from "../util/contentManagement.js"; + +export const pages = { + TOPICS: "topics-page", + SCHEMA: "schema-page", + CONSUMER_GROUPS: "consumer-groups-page", + CONSUMER_GROUPS_DETAILS: "consumer-groups-details-page", + ACCESS_CONTROL_LIST: "access-control-list-page", + NODES: "nodes-page", + TOPIC_MESSAGES: "topic-messages-page", + DEFAULT: "topics-page" +} + +export default class Navigator { + constructor() { + this.registerNavbar(); + } + + allPages = { + [pages.TOPICS]: { + header: "Topics", + showInNavbar: true, + instance: new TopicsPage(this, pages.TOPICS), + icon: "bi-collection" + }, + [pages.SCHEMA]: { + header: "Schema registry", + showInNavbar: true, + icon: "bi-file-code" + }, + [pages.CONSUMER_GROUPS]: { + header: "Consumer groups", + showInNavbar: true, + instance: new ConsumerGroupPage(this, pages.CONSUMER_GROUPS), + icon: "bi-inboxes" + }, + [pages.ACCESS_CONTROL_LIST]: { + header: "Access control list", + showInNavbar: true, + instance: new AccessControlListPage(pages.ACCESS_CONTROL_LIST), + icon: "bi-shield-lock" + }, + [pages.NODES]: { + header: "Nodes", + showInNavbar: true, + instance: new NodesPage(pages.NODES), + icon: "bi-diagram-3" + }, + [pages.TOPIC_MESSAGES]: { + header: "Messages", + showInNavbar: false, + instance: new MessagesPage(pages.TOPIC_MESSAGES), + parent: pages.TOPICS + }, + [pages.CONSUMER_GROUPS_DETAILS]: { + header: "Consumer group details", + showInNavbar: false, + instance: new ConsumerGroupDetailsPage(pages.CONSUMER_GROUPS_DETAILS), + parent: pages.CONSUMER_GROUPS + } + }; + + registerNavbar() { + const keys = Object.keys(this.allPages); + const navbar = $("#navbar-list"); + navbar.empty(); + + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + const value = 
this.allPages[key]; + if (!value.showInNavbar) continue; + const navItem = $("
  • ") + .addClass("nav-item") + .addClass("left-padding") + .addClass("pointer"); + + const navHolder = $("
    ") + .addClass("d-flex") + .addClass("left-margin") + .addClass("nav-row") + .click(() => this.navigateTo(key)); + + const icon = createIcon(value.icon) + .addClass("align-self-center"); + const navLink = $("", { + text: value.header, + href: "#" + }) + .addClass("nav-link") + .addClass("active") + .addClass("link"); + navHolder.append(icon); + navHolder.append(navLink); + navItem.append(navHolder); + navbar.append(navItem); + } + } + + navigateTo(requestedPage, params) { + const keys = Object.keys(this.allPages); + for (let i = 0; i < keys.length; i++) { + const elementName = keys[i]; + const d = $("#" + elementName); + if (d !== null) { + if (elementName !== requestedPage) { + d.removeClass("shown") + .addClass("hidden"); + } else { + d.removeClass("hidden") + .addClass("shown"); + this.open(requestedPage, params); + } + } else { + console.error("Can not find page div: ", keys[i]); + } + } + + this.navigateBreadcrumb(requestedPage, params); + } + + navigateToDefaultPage() { + this.navigateTo(pages.DEFAULT); + } + + open(pageId, params) { + const value = this.allPages[pageId]; + value.instance.open(params); + } + + navigateBreadcrumb(page, params) { + const breadcrumb = $("#nav-breadcrumb"); + breadcrumb.empty(); + + let nextPage = this.allPages[page]; + let pageId = page; + + let i = 0; + while (nextPage !== undefined) { + let li; + // We only need to append possible params to the very first element. 
+ if (i === 0) { + li = this.createBreadcrumbItem(nextPage.header, pageId, true, params); + } else { + li = this.createBreadcrumbItem(nextPage.header, pageId, false); + } + breadcrumb.prepend(li); + pageId = nextPage.parent; + nextPage = this.allPages[pageId]; + i++; + } + } + + createBreadcrumbItem(text, pageId, isActive, params) { + let breadcrumbText = text; + if (params !== undefined && params.length > 0 && (params[0] !== null && params[0] !== undefined)) { + breadcrumbText = text + " (" + params[0] + ")"; + } + const a = $("", {href: "#", text: breadcrumbText}) + .click(() => this.navigateTo(pageId, params)); + if (isActive) { + a.addClass("active"); + } + + const li = $("
  • ") + .addClass("breadcrumb-item"); + li.append(a); + return li; + } +} \ No newline at end of file diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/nodesPage.js b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/nodesPage.js new file mode 100644 index 0000000000000..94b2b1e6a270d --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/nodesPage.js @@ -0,0 +1,47 @@ +import {doPost, errorPopUp} from "../web/web.js"; +import {createTableItem} from "../util/contentManagement.js"; +import {toggleSpinner} from "../util/spinner.js"; + +export default class NodesPage { + constructor(containerId) { + this.containerId = containerId; + Object.getOwnPropertyNames(NodesPage.prototype).forEach((key) => { + if (key !== 'constructor') { + this[key] = this[key].bind(this); + } + }); + } + + open() { + const req = { + action: "getInfo" + }; + doPost(req, (data) => { + setTimeout(() => { + this.updateInfo(data); + toggleSpinner(this.containerId); + }, 2000); + }, data => { + errorPopUp("Error getting Kafka info: ", data); + }); + toggleSpinner(this.containerId); + } + + updateInfo(data) { + $('#cluster-id').html(data.clusterInfo.id); + $('#cluster-controller').html(data.broker); + $('#cluster-acl').html(data.clusterInfo.aclOperations); + + const nodes = data.clusterInfo.nodes; + let clusterNodesTable = $('#cluster-table tbody'); + clusterNodesTable.empty(); + for (let i = 0; i < nodes.length; i++) { + const d = nodes[i]; + let tableRow = $("
  • "); + tableRow.append(createTableItem(d.id)); + tableRow.append(createTableItem(d.host)); + tableRow.append(createTableItem(d.port)); + clusterNodesTable.append(tableRow); + } + } +} \ No newline at end of file diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/schemaPage.js b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/schemaPage.js new file mode 100644 index 0000000000000..82b3f5f8d108c --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/schemaPage.js @@ -0,0 +1,16 @@ +export default class SchemaPage{ + constructor(containerId) { + this.containerId = containerId; + Object.getOwnPropertyNames(SchemaPage.prototype).forEach((key) => { + if (key !== 'constructor') { + this[key] = this[key].bind(this); + } + }); + } + + // TODO: stub. must be implemented by all pages + open(){ + + } + +} \ No newline at end of file diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/topicsPage.js b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/topicsPage.js new file mode 100644 index 0000000000000..28156717578c8 --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/pages/topicsPage.js @@ -0,0 +1,188 @@ +import {doPost, errorPopUp} from "../web/web.js"; +import {createIcon, createTableItem, createTableItemHtml, hideItem, showItem} from "../util/contentManagement.js"; +import {pages} from "./navigator.js"; + +export default class TopicsPage { + constructor(navigator, containerId) { + this.navigator = navigator; + this.containerId = containerId; + this.registerButtonHandlers(); + + // TODO: move to common function with comment + Object.getOwnPropertyNames(TopicsPage.prototype).forEach((key) => { + if (key !== 'constructor') { + this[key] = this[key].bind(this); + } + }); + } + + open() { + window.currentContext = {}; + this.requestTopics(this.onTopicsLoaded, this.onTopicsFailed); + 
} + + registerButtonHandlers() { + + const topicNameInput = $("#topic-name-modal-input"); + $("#create-topic-btn").click(() => { + if (!this.validateTopicName(topicNameInput.val())) { + this.showErrorIfInvalid(topicNameInput.val(), this.validateTopicName, topicNameValidationErrorBox); + return; + } + + this.createTopic(this.onTopicsLoaded, this.onTopicsFailed); + $('#create-topic-modal').modal('hide'); + $('#topic-name-modal-input').val(""); + $('#partitions-modal-input').val(""); + $('#replications-modal-input').val(""); + }) + + $("#open-create-topic-modal-btn").click(() => { + this.loadNodesCount(); + $('#create-topic-modal').modal('show'); + }); + + $('.close-modal-btn').click(() => { + hideItem($(".modal")); + hideItem($("#topic-creation-validation-msg-box")); + hideItem($("#topic-name-validation-msg")); + hideItem($("#replication-validation-msg")); + }); + + $("#delete-topic-btn").click(() => { + const currentTopic = window.currentContext.topicName; + this.deleteTopic(currentTopic, this.deleteTopicRow, this.onTopicsFailed) + $("#delete-topic-modal").modal("hide"); + }); + + const topicNameValidationErrorBox = $("#topic-name-validation-msg"); + topicNameInput.keyup(() => this.showErrorIfInvalid(topicNameInput.val(), this.validateTopicName, topicNameValidationErrorBox)); + topicNameInput.change(() => this.showErrorIfInvalid(topicNameInput.val(), this.validateTopicName, topicNameValidationErrorBox)); + + const replicationInput = $("#replications-modal-input"); + replicationInput.keyup(() => { + const value = replicationInput.val(); + this.showErrorIfInvalid(value, this.validateReplicationFactor, $("#replication-validation-msg")); + }); + } + + loadNodesCount() { + const req = { + action: "getInfo" + }; + doPost(req, (data) => { + window.currentContext.nodesCount = data.clusterInfo.nodes.length; + }, data => { + errorPopUp("Could not obtain nodes count."); + }); + } + + showErrorIfInvalid(value, validationFunction, errBoxSelector) { + const valid = 
validationFunction(value); + if (!valid) { + showItem($("#topic-creation-validation-msg-box")); + showItem(errBoxSelector); + $("#create-topic-btn") + .addClass("disabled") + .attr("disabled", true); + } else { + hideItem(errBoxSelector); + const topicMsgValidationBoxChildren = $("#topic-creation-validation-msg-box span"); + const allChildrenHidden = topicMsgValidationBoxChildren + .filter((x) => !$(x).hasClass("hidden")) + .length > 0; + if (allChildrenHidden) { + hideItem($("#topic-creation-validation-msg-box")); + $("#create-topic-btn") + .removeClass("disabled") + .attr("disabled", false); + } + } + } + + validateTopicName(name) { + const legalChars = /^[a-zA-Z\d\.\_]+$/; + const maxNameLength = 255; + return legalChars.test(name) && name.length < maxNameLength; + } + + validateReplicationFactor(replicationFactor) { + return currentContext.nodesCount >= replicationFactor; + } + + requestTopics(onTopicsLoaded, onTopicsFailed) { + const req = { + action: "getTopics" + }; + doPost(req, onTopicsLoaded, onTopicsFailed); + } + + onTopicsLoaded(data) { + let tableBody = $('#topics-table tbody'); + tableBody.empty(); + + for (let i = 0; i < data.length; i++) { + let tableRow = $(""); + let d = data[i]; + tableRow.append(createTableItem(d.name)); + tableRow.append(createTableItem(d.topicId)); + tableRow.append(createTableItem(d.partitionsCount)); + tableRow.append(createTableItem(("" + d.nmsg))); + + const deleteIcon = createIcon("bi-trash-fill"); + const deleteBtn = $("") + .addClass("btn") + .click((event) => { + window.currentContext.topicName = d.name; + $("#delete-topic-modal").modal("show"); + $("#delete-topic-name-span").text(d.name); + event.stopPropagation(); + }) + .append(deleteIcon); + + + tableRow.click(() => { + self.navigator.navigateTo(pages.TOPIC_MESSAGES, [d.name]); + }); + const controlHolder = $("
    ") + .append(deleteBtn); + tableRow.append(createTableItemHtml(controlHolder)); + + const self = this; + + tableBody.append(tableRow); + } + } + + onTopicsFailed(data) { + errorPopUp("Error getting topics: ", data); + } + + createTopic(onTopicsLoaded, onTopicsFailed) { + const topicName = $("#topic-name-modal-input").val(); + const partitions = $("#partitions-modal-input").val(); + const replications = $("#replications-modal-input").val(); + + const req = { + action: "createTopic", + topicName: topicName, + partitions: partitions, + replications: replications + }; + doPost(req, () => this.requestTopics(this.onTopicsLoaded, this.onTopicsFailed), onTopicsFailed); + } + + // TODO: add pagination here + deleteTopic(topicName, onTopicsDeleted, onTopicsFailed) { + const req = { + action: "deleteTopic", + key: topicName + }; + doPost(req, onTopicsDeleted, onTopicsFailed); + } + + deleteTopicRow(data) { + const topicName = window.currentContext.topicName; + $("#topics-table > tbody > tr > td:contains('" + topicName + "')").parent().remove() + } +} \ No newline at end of file diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-static/js/util/contentManagement.js b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/util/contentManagement.js new file mode 100644 index 0000000000000..d9f87034ab8dd --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/util/contentManagement.js @@ -0,0 +1,75 @@ +export function createTableItem(text) { + return $("
    ").append(createTableItemHtml( + collapseContent + .addClass("collapse-content")) + .attr("colspan", tableWidth)) + .attr("id", this.collapseId) + .addClass("collapse"); + } + + collapse() { + $("#" + this.collapseId).toggle(); + if (this.arrow.hasClass("icon-rotated")) { + this.arrow.removeClass("icon-rotated"); + } else { + this.arrow.addClass("icon-rotated"); + } + } +} + +export function showItem(selector){ + selector.addClass("shown") + .removeClass("hidden"); +} + +export function hideItem(selector){ + selector.addClass("hidden") + .removeClass("shown"); +} + +export function toggleItem(selector) { + if (selector.hasClass("shown")) { + hideItem(selector); + } else { + showItem(selector); + } +} \ No newline at end of file diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-static/js/util/datetimeUtil.js b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/util/datetimeUtil.js new file mode 100644 index 0000000000000..384d693bde861 --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/util/datetimeUtil.js @@ -0,0 +1,17 @@ +function addTrailingZero(data) { + if (data < 10) { + return "0" + data; + } + return data; +} + +export default function timestampToFormattedString(UNIX_timestamp) { + const a = new Date(UNIX_timestamp); + const year = a.getFullYear(); + const month = addTrailingZero(a.getMonth()); + const date = addTrailingZero(a.getDate()); + const hour = addTrailingZero(a.getHours()); + const min = addTrailingZero(a.getMinutes()); + const sec = addTrailingZero(a.getSeconds()); + return date + '/' + month + '/' + year + ' ' + hour + ':' + min + ':' + sec; +} \ No newline at end of file diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-static/js/util/spinner.js b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/util/spinner.js new file mode 100644 index 0000000000000..a5ca80594e44f --- /dev/null +++ 
b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/util/spinner.js @@ -0,0 +1,21 @@ +export function toggleSpinner(containerId, spinnerContainerId) { + const spinnerId = spinnerContainerId === undefined ? "#page-load-spinner" : "#" + spinnerContainerId; + const toggleContainerId = "#" + containerId; + let first; + let second; + + if ($(spinnerId).hasClass("shown")) { + first = toggleContainerId; + second = spinnerId; + } else { + second = toggleContainerId; + first = spinnerId; + } + + $(first) + .removeClass("hidden") + .addClass("shown"); + $(second) + .addClass("hidden") + .removeClass("shown"); +} \ No newline at end of file diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-static/js/web/web.js b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/web/web.js new file mode 100644 index 0000000000000..6ba79b5c19720 --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-static/js/web/web.js @@ -0,0 +1,22 @@ +import {api} from "../config.js" + +export function doPost(data, successCallback, errorCallback) { + $.ajax({ + url: api, + type: 'POST', + data: JSON.stringify(data), + contentType: "application/json; charset=utf-8", + dataType: 'json', + context: this, + success: (data) => successCallback(data), + error: (data, errorType, errorObj) => errorCallback(data, errorType, errorObj) + }); +} + +export function errorPopUp() { + let message = ""; + for (let i = 0; i < arguments.length; i++) { + message += arguments[i] + " "; + } + alert(message); +} diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-templates/embedded.html b/extensions/kafka-client/deployment/src/main/resources/dev-templates/embedded.html new file mode 100644 index 0000000000000..dac4cf81f4150 --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-templates/embedded.html @@ -0,0 +1,3 @@ + + + Kafka UI diff --git 
a/extensions/kafka-client/deployment/src/main/resources/dev-templates/kafka-dev-ui.html b/extensions/kafka-client/deployment/src/main/resources/dev-templates/kafka-dev-ui.html new file mode 100644 index 0000000000000..f07f33984441f --- /dev/null +++ b/extensions/kafka-client/deployment/src/main/resources/dev-templates/kafka-dev-ui.html @@ -0,0 +1,513 @@ +{#include main fluid=true} +{#style} +html { +min-height: 90vh; +min-width: 100vh; +} + +body { +min-height: 90vh; +min-width: 100vh; +} + +.row-holder { +padding: 0; +margin: 0; + +} + +.row:after { +content: ""; +display: table; +clear: both; +} + +.content-holder { +height: auto; +min-height: 90vh; +} + +.link { +background: none; +border: none; +} + +.top-margin { +margin-top: 1em; +} + +.left-margin { +margin-left: 1em; +} + +.left-padding { +padding-left: 1em; +} + +.shown { +display: flex; +height: auto; +min-width: 100%; +} + +.text-shown { +display: inline; +} + +.hidden { +display: none +} + +.nav-item:hover > .nav-row > a { +background-color: #005fff; +color: #e9ecef; +} + +.nav-item:hover > .nav-row > i { +background-color: #005fff; +color: #e9ecef; +} + +#navbar-list > .nav-item:hover { +background-color: #005fff; +color: #e9ecef; +} + +.table-hover:hover { +cursor: pointer; +} + +.multiselect-container > li > a > label { +padding-left: 15px !important; +} + +.page { +min-height: calc(100vh - 135px); +} + +.table-hover:hover { +cursor: pointer; +} + +.pointer { +cursor: pointer; +} + +.no-hover { +background-color: white; +cursor: default; +} + +.no-hover:hover { +background-color: white !important; +cursor: default; +} + +.icon-rotated { +transform: rotate(90deg); +} + +.navbar-brand img { +border-right: 1px solid darkgrey; +padding-right: 10px; +margin-right: 5px; +} + +.navbar-brand { +padding: 0; +margin: 0; +} + +#nav-menu-panel { +padding: 0px; +} + +.float-plus-btn { +position: fixed; +bottom: 60px; +right: 60px; +border-radius: 100%; +height: 50px; +width: 50px; +} + +.breadcrumb-item::before { 
+float: left; +padding-right: 0.5rem; +color: #007bff; +content: "〉"; +} + +.breadcrumb-item + .breadcrumb-item::before { +float: left; +padding-right: 0.5rem; +color: #007bff; +content: "〉"; +} + +.breadcrumb { +background-color: #343a40; +margin-bottom: 0; +padding: 0 0 0 5px; +} + +.bi-trash-fill:hover { +color: #007bff; +} + +.collapse-content { +max-width: 1200px; +} + +.thead-multiselect { +background-color: #343a40; +color: white; +border: 0px; +font-weight: bold; +} + +.thead-text { +color: white; +} + +#msg-table-holder { +min-width: 100%; +} +{/style} +{#styleref} + + +{/styleref} +{#scriptref} + + +{/scriptref} +{#title}Kafka Dev UI{/title} +{#body} +
    + + + + + + +
    +
    +
    +
    ", { + text: text + }); +} + +export function createTableItemHtml(html) { + return $("").append(html); +} + +export function createTableHead(title) { + return $("") + .attr("scope", "col") + .text(title); +} + +export function createIcon(iconClass) { + return $("") + .addClass("bi") + .addClass(iconClass); +} + +export class CollapseRow { + constructor(collapseId) { + this.collapseId = collapseId; + const chevronIcon = createIcon("bi-chevron-right") + .addClass("rotate-icon"); + this.arrow = $("
    ") + .addClass("d-flex") + .addClass("justify-content-center") + .append(chevronIcon); + + Object.getOwnPropertyNames(CollapseRow.prototype).forEach((key) => { + if (key !== 'constructor') { + this[key] = this[key].bind(this); + } + }); + } + + getCollapseContent(tableWidth, collapseContent) { + return $("
    + + + + + + + + + + + +
    Topic NameIdPartitions countNumber of msg
    + + + +

    +
    +
    +
    + + + + + + + + + + + + + + +
    Offset + + Timestamp KeyValue
    +
    + +
    +
    + + +
    +
    +
    + + + + + + + + + + + + + +
    StateIdCoordinatorProtocolMembersLag(Sum)
    +
    +
    +
    +
    + + + + + + + + + + + + +
    Member IDHostPartitionsLag(Sum)
    +
    +
    +
    +
    +
    + Kafka cluster id: 
    + Controller node (broker): 
    + ACL operations: 
    +
    +
    +

    Cluster nodes

    +
    + + + + + + + + + + +
    IdHostPort
    +
    +
    + + + +{/body} +{/include} \ No newline at end of file diff --git a/extensions/kafka-client/runtime/pom.xml b/extensions/kafka-client/runtime/pom.xml index 50ac39873257c..1d237acadf213 100644 --- a/extensions/kafka-client/runtime/pom.xml +++ b/extensions/kafka-client/runtime/pom.xml @@ -59,6 +59,11 @@ provided
    + + io.quarkus + quarkus-vertx-http-dev-console-runtime-spi + + io.quarkus quarkus-junit5-internal diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/health/KafkaHealthCheck.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/health/KafkaHealthCheck.java index a0b7c6648caa7..e9b9a24bd265d 100644 --- a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/health/KafkaHealthCheck.java +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/health/KafkaHealthCheck.java @@ -1,43 +1,23 @@ package io.quarkus.kafka.client.health; -import java.util.HashMap; -import java.util.Map; - -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; import javax.enterprise.context.ApplicationScoped; -import javax.inject.Inject; -import org.apache.kafka.clients.admin.AdminClient; -import org.apache.kafka.clients.admin.AdminClientConfig; import org.apache.kafka.common.Node; import org.eclipse.microprofile.health.HealthCheck; import org.eclipse.microprofile.health.HealthCheckResponse; import org.eclipse.microprofile.health.HealthCheckResponseBuilder; import org.eclipse.microprofile.health.Readiness; -import io.smallrye.common.annotation.Identifier; +import io.quarkus.kafka.client.runtime.KafkaAdminClient; @Readiness @ApplicationScoped public class KafkaHealthCheck implements HealthCheck { - @Inject - @Identifier("default-kafka-broker") - Map config; - - private AdminClient client; - - @PostConstruct - void init() { - Map conf = new HashMap<>(config); - conf.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, "5000"); - client = AdminClient.create(conf); - } + KafkaAdminClient kafkaAdminClient; - @PreDestroy - void stop() { - client.close(); + public KafkaHealthCheck(KafkaAdminClient kafkaAdminClient) { + this.kafkaAdminClient = kafkaAdminClient; } @Override @@ -45,7 +25,7 @@ public HealthCheckResponse call() { HealthCheckResponseBuilder builder = 
HealthCheckResponse.named("Kafka connection health check").up(); try { StringBuilder nodes = new StringBuilder(); - for (Node node : client.describeCluster().nodes().get()) { + for (Node node : kafkaAdminClient.getCluster().nodes().get()) { if (nodes.length() > 0) { nodes.append(','); } diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/KafkaAdminClient.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/KafkaAdminClient.java new file mode 100644 index 0000000000000..c9b75dc1d00c0 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/KafkaAdminClient.java @@ -0,0 +1,84 @@ +package io.quarkus.kafka.client.runtime; + +import java.util.*; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; + +import org.apache.kafka.clients.admin.*; +import org.apache.kafka.common.acl.AccessControlEntryFilter; +import org.apache.kafka.common.acl.AclBinding; +import org.apache.kafka.common.acl.AclBindingFilter; +import org.apache.kafka.common.resource.ResourcePatternFilter; + +import io.quarkus.kafka.client.runtime.ui.model.request.KafkaCreateTopicRequest; +import io.smallrye.common.annotation.Identifier; + +@ApplicationScoped +public class KafkaAdminClient { + private static final int DEFAULT_ADMIN_CLIENT_TIMEOUT = 5000; + + @Inject + @Identifier("default-kafka-broker") + Map config; + + private AdminClient client; + + @PostConstruct + void init() { + Map conf = new HashMap<>(config); + conf.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, DEFAULT_ADMIN_CLIENT_TIMEOUT); + client = AdminClient.create(conf); + } + + @PreDestroy + void stop() { + client.close(); + } + + public DescribeClusterResult getCluster() { + return client.describeCluster(); + } + + public Collection 
getTopics() throws InterruptedException, ExecutionException { + return client.listTopics().listings().get(); + } + + public Collection getConsumerGroups() throws InterruptedException, ExecutionException { + var consumerGroupIds = client.listConsumerGroups().all().get().stream() + .map(ConsumerGroupListing::groupId) + .collect(Collectors.toList()); + return client.describeConsumerGroups(consumerGroupIds).all().get() + .values(); + } + + public boolean deleteTopic(String name) { + Collection topics = new ArrayList<>(); + topics.add(name); + DeleteTopicsResult dtr = client.deleteTopics(topics); + return dtr.topicNameValues() != null; + } + + public boolean createTopic(KafkaCreateTopicRequest kafkaCreateTopicRq) { + var partitions = Optional.ofNullable(kafkaCreateTopicRq.getPartitions()).orElse(1); + var replications = Optional.ofNullable(kafkaCreateTopicRq.getReplications()).orElse((short) 1); + var newTopic = new NewTopic(kafkaCreateTopicRq.getTopicName(), partitions, replications); + + CreateTopicsResult ctr = client.createTopics(List.of(newTopic)); + return ctr.values() != null; + } + + public ListConsumerGroupOffsetsResult listConsumerGroupOffsets(String groupId) { + return client.listConsumerGroupOffsets(groupId); + } + + public Collection getAclInfo() throws InterruptedException, ExecutionException { + AclBindingFilter filter = new AclBindingFilter(ResourcePatternFilter.ANY, AccessControlEntryFilter.ANY); + var options = new DescribeAclsOptions().timeoutMs(1_000); + return client.describeAcls(filter, options).values().get(); + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/KafkaRuntimeConfigProducer.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/KafkaRuntimeConfigProducer.java index 2be14e5717251..93e2ca309ab99 100644 --- a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/KafkaRuntimeConfigProducer.java +++ 
b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/KafkaRuntimeConfigProducer.java @@ -17,7 +17,7 @@ public class KafkaRuntimeConfigProducer { // not "kafka.", because we also inspect env vars, which start with "KAFKA_" private static final String CONFIG_PREFIX = "kafka"; - + private static final String UI_CONFIG_PREFIX = CONFIG_PREFIX + ".ui"; private static final String GROUP_ID = "group.id"; @Produces @@ -29,7 +29,10 @@ public Map createKafkaRuntimeConfig(Config config, ApplicationCo for (String propertyName : config.getPropertyNames()) { String propertyNameLowerCase = propertyName.toLowerCase(); - if (!propertyNameLowerCase.startsWith(CONFIG_PREFIX)) { + if (propertyNameLowerCase.startsWith(UI_CONFIG_PREFIX)) { + config.getOptionalValue(propertyName, String.class).orElse(""); + } + if (!propertyNameLowerCase.startsWith(CONFIG_PREFIX) || propertyNameLowerCase.startsWith(UI_CONFIG_PREFIX)) { continue; } // Replace _ by . - This is because Kafka properties tend to use . 
and env variables use _ for every special diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/AbstractHttpRequestHandler.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/AbstractHttpRequestHandler.java new file mode 100644 index 0000000000000..ede6bc54f21f8 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/AbstractHttpRequestHandler.java @@ -0,0 +1,76 @@ +package io.quarkus.kafka.client.runtime.ui; + +import io.quarkus.arc.Arc; +import io.quarkus.arc.ManagedContext; +import io.vertx.core.Handler; +import io.vertx.core.http.HttpServerRequest; +import io.vertx.ext.web.RoutingContext; + +public abstract class AbstractHttpRequestHandler implements Handler { + private final ManagedContext currentManagedContext; + private final Handler currentManagedContextTerminationHandler; + + public AbstractHttpRequestHandler() { + this.currentManagedContext = Arc.container().requestContext(); + this.currentManagedContextTerminationHandler = e -> currentManagedContext.terminate(); + } + + @Override + @SuppressWarnings("unchecked") // ignore currentManagedContextTerminationHandler types, just use Object + public void handle(final RoutingContext ctx) { + + if (currentManagedContext.isActive()) { + doHandle(ctx); + } else { + + currentManagedContext.activate(); + ctx.response() + .endHandler(currentManagedContextTerminationHandler) + .exceptionHandler(currentManagedContextTerminationHandler) + .closeHandler(currentManagedContextTerminationHandler); + + try { + doHandle(ctx); + } catch (Throwable t) { + currentManagedContext.terminate(); + throw t; + } + } + } + + public void doHandle(RoutingContext ctx) { + try { + HttpServerRequest request = ctx.request(); + + switch (request.method().name()) { + case "OPTIONS": + handleOptions(ctx); + break; + case "POST": + handlePost(ctx); + break; + case "GET": + handleGet(ctx); + break; + default: + ctx.next(); + 
break; + } + } catch (Exception e) { + ctx.fail(e); + } + } + + public abstract void handlePost(RoutingContext event); + + public abstract void handleGet(RoutingContext event); + + public abstract void handleOptions(RoutingContext event); + + protected String getRequestPath(RoutingContext event) { + HttpServerRequest request = event.request(); + return request.path(); + } + + //TODO: service methods for HTTP requests +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/KafkaTopicClient.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/KafkaTopicClient.java new file mode 100644 index 0000000000000..174ef04aa08b8 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/KafkaTopicClient.java @@ -0,0 +1,276 @@ +package io.quarkus.kafka.client.runtime.ui; + +import static io.quarkus.kafka.client.runtime.ui.util.ConsumerFactory.createConsumer; + +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.*; +import java.util.concurrent.ExecutionException; +import java.util.function.Function; +import java.util.stream.Collectors; + +import javax.annotation.PostConstruct; +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; + +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.AdminClientConfig; +import org.apache.kafka.clients.consumer.Consumer; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.TopicPartitionInfo; +import org.apache.kafka.common.serialization.BytesSerializer; +import 
org.apache.kafka.common.utils.Bytes; + +import io.quarkus.kafka.client.runtime.ui.model.Order; +import io.quarkus.kafka.client.runtime.ui.model.converter.KafkaModelConverter; +import io.quarkus.kafka.client.runtime.ui.model.request.KafkaMessageCreateRequest; +import io.quarkus.kafka.client.runtime.ui.model.response.KafkaMessagePage; +import io.smallrye.common.annotation.Identifier; + +@ApplicationScoped +public class KafkaTopicClient { + // TODO: make configurable + private static final int RETRIES = 3; + + //TODO: inject me + private AdminClient adminClient; + + KafkaModelConverter modelConverter = new KafkaModelConverter(); + + @Inject + @Identifier("default-kafka-broker") + Map config; + + @PostConstruct + void init() { + Map conf = new HashMap<>(config); + conf.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, "5000"); + adminClient = AdminClient.create(conf); + } + + private Producer createProducer() { + Map config = new HashMap<>(this.config); + + config.put(ProducerConfig.CLIENT_ID_CONFIG, "kafka-ui-producer-" + UUID.randomUUID()); + // TODO: make generic to support AVRO serializer + config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, BytesSerializer.class.getName()); + config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, BytesSerializer.class.getName()); + + return new KafkaProducer<>(config); + } + + /** + * Reads the messages from particular topic. Offset for next page is returned within response. + * The first/last page offset could be retrieved with + * {@link KafkaTopicClient#getPagePartitionOffset(String, Collection, Order)} + * method. + * + * @param topicName topic to read messages from + * @param order ascending or descending. 
Defaults to descending (newest first) + * @param partitionOffsets the offset for page to be read + * @param pageSize size of read page + * @return page of messages, matching requested filters + */ + public KafkaMessagePage getTopicMessages( + String topicName, + Order order, + Map partitionOffsets, + int pageSize) + throws ExecutionException, InterruptedException { + assertParamsValid(pageSize, partitionOffsets); + + var requestedPartitions = partitionOffsets.keySet(); + assertRequestedPartitionsExist(topicName, requestedPartitions); + if (order == null) + order = Order.OLD_FIRST; + + var allPartitionsResult = getConsumerRecords(topicName, order, pageSize, requestedPartitions, partitionOffsets, + pageSize); + + Comparator> comparator = Comparator.comparing(ConsumerRecord::timestamp); + if (Order.NEW_FIRST == order) + comparator = comparator.reversed(); + allPartitionsResult.sort(comparator); + + // We might have too many values. Throw away newer items, which don't fit into page. + if (allPartitionsResult.size() > pageSize) { + allPartitionsResult = allPartitionsResult.subList(0, pageSize); + } + + var newOffsets = calculateNewPartitionOffset(partitionOffsets, allPartitionsResult, order, topicName); + var convertedResult = allPartitionsResult.stream() + .map(modelConverter::convert) + .collect(Collectors.toList()); + return new KafkaMessagePage(newOffsets, convertedResult); + } + + // Fail fast on wrong params, even before querying Kafka. 
+ private void assertParamsValid(int pageSize, Map partitionOffsets) { + if (pageSize <= 0) + throw new IllegalArgumentException("Page size must be > 0."); + + if (partitionOffsets == null || partitionOffsets.isEmpty()) + throw new IllegalArgumentException("Partition offset map must be specified."); + + for (var partitionOffset : partitionOffsets.entrySet()) { + if (partitionOffset.getValue() < 0) + throw new IllegalArgumentException( + "Partition offset must be > 0."); + } + } + + private ConsumerRecords pollWhenReady(Consumer consumer) { + var attempts = 0; + var pullDuration = Duration.of(100, ChronoUnit.MILLIS); + var result = consumer.poll(pullDuration); + + while (result.isEmpty() && attempts < RETRIES) { + result = consumer.poll(pullDuration); + attempts++; + } + return result; + } + + /* + * FIXME: should consider compaction strategy, when our new offset not necessary = old + total records read, but some + * records might be deleted, so we'll end up seeing duplicates on some pages. + * Imagine this case: + * - page size = 10 + * - 30 messages pushed, value is incremental 1 ... 30. + * - message 10 gets removed, as message 15 has same key because of compaction + * - we request page 1. it had offset 0. we return values [1, 2, 3, ..., 9, 11], total of 10. We get new offset for page 2 = + * 0 + totalRecords = 10. + * - we request page 2. we read starting from offset = 10. There is no message with that offset, but we see message 11 again + * instead. + */ + private Map calculateNewPartitionOffset(Map oldPartitionOffset, + Collection> records, Order order, String topicName) { + var newOffsets = records.stream().map(ConsumerRecord::partition) + .collect(Collectors.groupingBy(Function.identity(), Collectors.counting())); + + var newPartitionOffset = new HashMap(); + for (var partition : oldPartitionOffset.keySet()) { + // We should add in case we seek for oldest and reduce for newest. + var multiplier = Order.OLD_FIRST == order ? 
1 : -1; + + // If new offset for partition is not there in the map - we didn't have records for that partition. So, just take the old offset. + var newOffset = oldPartitionOffset.get(partition) + multiplier * newOffsets.getOrDefault(partition, 0L); + newPartitionOffset.put(partition, newOffset); + } + return newPartitionOffset; + } + + private long getPosition(String topicName, int partition, Order order) { + try (var consumer = createConsumer(topicName, partition, this.config)) { + var topicPartition = new TopicPartition(topicName, partition); + if (Order.NEW_FIRST == order) { + consumer.seekToEnd(List.of(topicPartition)); + } else { + consumer.seekToBeginning(List.of(topicPartition)); + } + return consumer.position(topicPartition); + } + } + + public Map getPagePartitionOffset(String topicName, Collection requestedPartitions, Order order) + throws ExecutionException, InterruptedException { + assertRequestedPartitionsExist(topicName, requestedPartitions); + + var result = new HashMap(); + for (var requestedPartition : requestedPartitions) { + var maxPosition = getPosition(topicName, requestedPartition, order); + result.put(requestedPartition, maxPosition); + } + + return result; + } + + private List> getConsumerRecords(String topicName, Order order, int pageSize, + Collection requestedPartitions, Map start, int totalMessages) { + List> allPartitionsResult = new ArrayList<>(); + + // Requesting a full page from each partition and then filtering out redundant data. Thus, we'll ensure, we read data in historical order. + for (var requestedPartition : requestedPartitions) { + List> partitionResult = new ArrayList<>(); + var offset = start.get(requestedPartition); + try (var consumer = createConsumer(topicName, requestedPartition, this.config)) { + // Move pointer to currently read position. It might be different per partition, so requesting with offset per partition. 
+ var partition = new TopicPartition(topicName, requestedPartition); + + var seekedOffset = Order.OLD_FIRST == order ? offset : Long.max(offset - pageSize, 0); + consumer.seek(partition, seekedOffset); + + var numberOfMessagesReadSoFar = 0; + var keepOnReading = true; + + while (keepOnReading) { + var records = pollWhenReady(consumer); + if (records.isEmpty()) + keepOnReading = false; + + for (var record : records) { + numberOfMessagesReadSoFar++; + partitionResult.add(record); + + if (numberOfMessagesReadSoFar >= totalMessages) { + keepOnReading = false; + break; + } + } + } + // We need to cut off result, if it was reset to 0, as we don't want see entries from old pages. + if (Order.NEW_FIRST == order && seekedOffset == 0 && partitionResult.size() > offset.intValue()) { + partitionResult.sort(Comparator.comparing(ConsumerRecord::timestamp)); + partitionResult = partitionResult.subList(0, offset.intValue()); + } + + } + allPartitionsResult.addAll(partitionResult); + } + return allPartitionsResult; + } + + private void assertRequestedPartitionsExist(String topicName, Collection requestedPartitions) + throws InterruptedException, ExecutionException { + var topicPartitions = partitions(topicName); + + if (!new HashSet<>(topicPartitions).containsAll(requestedPartitions)) { + throw new IllegalArgumentException(String.format( + "Requested messages from partition, that do not exist. Requested partitions: %s. 
Existing partitions: %s", + requestedPartitions, topicPartitions)); + } + } + + public void createMessage(KafkaMessageCreateRequest request) { + var record = new ProducerRecord<>(request.getTopic(), request.getPartition(), Bytes.wrap(request.getKey().getBytes()), + Bytes.wrap(request.getValue().getBytes()) + //TODO: support headers + ); + + try (var producer = createProducer()) { + producer.send(record); + } + } + + public List partitions(String topicName) throws ExecutionException, InterruptedException { + return adminClient.describeTopics(List.of(topicName)) + .allTopicNames() + .get() + .values().stream() + .reduce((a, b) -> { + throw new IllegalStateException( + "Requested info about single topic, but got result of multiple: " + a + ", " + b); + }) + .orElseThrow(() -> new IllegalStateException( + "Requested info about a topic, but nothing found. Topic name: " + topicName)) + .partitions().stream() + .map(TopicPartitionInfo::partition) + .collect(Collectors.toList()); + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/KafkaUiHandler.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/KafkaUiHandler.java new file mode 100644 index 0000000000000..8b7d916de765b --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/KafkaUiHandler.java @@ -0,0 +1,131 @@ + +package io.quarkus.kafka.client.runtime.ui; + +import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; +import static io.netty.handler.codec.http.HttpResponseStatus.OK; + +import java.util.concurrent.ExecutionException; + +import io.netty.handler.codec.http.HttpResponseStatus; +import io.quarkus.arc.Arc; +import io.quarkus.kafka.client.runtime.KafkaAdminClient; +import io.quarkus.kafka.client.runtime.ui.model.request.KafkaCreateTopicRequest; +import io.quarkus.kafka.client.runtime.ui.model.request.KafkaMessageCreateRequest; +import 
io.quarkus.kafka.client.runtime.ui.model.request.KafkaMessagesRequest; +import io.quarkus.kafka.client.runtime.ui.model.request.KafkaOffsetRequest; +import io.vertx.core.http.HttpServerRequest; +import io.vertx.ext.web.RoutingContext; + +public class KafkaUiHandler extends AbstractHttpRequestHandler { + + @Override + public void handlePost(RoutingContext event) { + if (event.body() == null) { + endResponse(event, BAD_REQUEST, "Request body is null"); + return; + } + var body = event.body().asJsonObject(); + if (body == null) { + endResponse(event, BAD_REQUEST, "Request JSON body is null"); + return; + } + var action = body.getString("action"); + + var message = "OK"; + var error = ""; + + var webUtils = kafkaWebUiUtils(); + var adminClient = kafkaAdminClient(); + + boolean res = false; + if (null != action) { + try { + switch (action) { + case "getInfo": + message = webUtils.toJson(webUtils.getKafkaInfo()); + res = true; + break; + case "getAclInfo": + message = webUtils.toJson(webUtils.getAclInfo()); + res = true; + break; + case "createTopic": + var topicCreateRq = event.body().asPojo(KafkaCreateTopicRequest.class); + res = adminClient.createTopic(topicCreateRq); + message = webUtils.toJson(webUtils.getTopics()); + break; + case "deleteTopic": + res = adminClient.deleteTopic(body.getString("key")); + message = "{}"; + res = true; + break; + case "getTopics": + message = webUtils.toJson(webUtils.getTopics()); + res = true; + break; + case "topicMessages": + var msgRequest = event.body().asPojo(KafkaMessagesRequest.class); + message = webUtils.toJson(webUtils.getMessages(msgRequest)); + res = true; + break; + case "getOffset": + var request = event.body().asPojo(KafkaOffsetRequest.class); + message = webUtils.toJson(webUtils.getOffset(request)); + res = true; + break; + case "createMessage": + var rq = event.body().asPojo(KafkaMessageCreateRequest.class); + webUtils.createMessage(rq); + message = "{}"; + res = true; + break; + case "getPartitions": + var topicName 
= body.getString("topicName"); + message = webUtils.toJson(webUtils.partitions(topicName)); + res = true; + break; + default: + break; + } + } catch (InterruptedException ex) { + Thread.currentThread().interrupt(); + } catch (ExecutionException ex) { + throw new RuntimeException(ex); + } + } + + if (res) { + endResponse(event, OK, message); + } else { + message = "ERROR: " + error; + endResponse(event, BAD_REQUEST, message); + } + } + + private void endResponse(RoutingContext event, HttpResponseStatus status, String message) { + event.response().setStatusCode(status.code()); + event.response().end(message); + } + + private KafkaUiUtils kafkaWebUiUtils() { + return Arc.container().instance(KafkaUiUtils.class).get(); + } + + @Override + public void handleGet(RoutingContext event) { + //TODO: move pure get requests processing here + HttpServerRequest request = event.request(); + String path = request.path(); + endResponse(event, OK, "GET method is not supported yet. Path is: " + path); + } + + @Override + public void handleOptions(RoutingContext event) { + endResponse(event, OK, "OPTION method is not supported yet"); + } + + private KafkaAdminClient kafkaAdminClient() { + return Arc.container().instance(KafkaAdminClient.class).get(); + } + +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/KafkaUiRecorder.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/KafkaUiRecorder.java new file mode 100644 index 0000000000000..90afe8521cc11 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/KafkaUiRecorder.java @@ -0,0 +1,17 @@ +package io.quarkus.kafka.client.runtime.ui; + +import io.quarkus.runtime.annotations.Recorder; +import io.vertx.core.Handler; +import io.vertx.ext.web.RoutingContext; + +/** + * Handles requests from kafka UI and html/js of UI + */ +@Recorder +public class KafkaUiRecorder { + + public Handler kafkaControlHandler() { + 
return new KafkaUiHandler(); + } + +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/KafkaUiUtils.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/KafkaUiUtils.java new file mode 100644 index 0000000000000..862fdcfeb2d2d --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/KafkaUiUtils.java @@ -0,0 +1,226 @@ +package io.quarkus.kafka.client.runtime.ui; + +import static io.quarkus.kafka.client.runtime.ui.util.ConsumerFactory.createConsumer; + +import java.util.*; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; + +import javax.inject.Singleton; + +import org.apache.kafka.clients.admin.ConsumerGroupDescription; +import org.apache.kafka.clients.admin.DescribeClusterResult; +import org.apache.kafka.clients.admin.MemberDescription; +import org.apache.kafka.clients.admin.TopicListing; +import org.apache.kafka.clients.consumer.OffsetAndMetadata; +import org.apache.kafka.common.Node; +import org.apache.kafka.common.TopicPartition; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +import io.quarkus.kafka.client.runtime.KafkaAdminClient; +import io.quarkus.kafka.client.runtime.ui.model.Order; +import io.quarkus.kafka.client.runtime.ui.model.request.KafkaMessageCreateRequest; +import io.quarkus.kafka.client.runtime.ui.model.request.KafkaMessagesRequest; +import io.quarkus.kafka.client.runtime.ui.model.request.KafkaOffsetRequest; +import io.quarkus.kafka.client.runtime.ui.model.response.*; +import io.smallrye.common.annotation.Identifier; + +@Singleton +public class KafkaUiUtils { + + private final KafkaAdminClient kafkaAdminClient; + + private final KafkaTopicClient kafkaTopicClient; + private final ObjectMapper objectMapper; + + private final Map config; + + public KafkaUiUtils(KafkaAdminClient kafkaAdminClient, KafkaTopicClient 
kafkaTopicClient, ObjectMapper objectMapper, + @Identifier("default-kafka-broker") Map config) { + this.kafkaAdminClient = kafkaAdminClient; + this.kafkaTopicClient = kafkaTopicClient; + this.objectMapper = objectMapper; + this.config = config; + } + + public KafkaInfo getKafkaInfo() throws ExecutionException, InterruptedException { + var clusterInfo = getClusterInfo(); + var broker = clusterInfo.getController().asFullNodeName(); + var topics = getTopics(); + var consumerGroups = getConsumerGroups(); + return new KafkaInfo(broker, clusterInfo, topics, consumerGroups); + } + + public KafkaClusterInfo getClusterInfo() throws ExecutionException, InterruptedException { + return clusterInfo(kafkaAdminClient.getCluster()); + } + + private KafkaNode kafkaNode(Node node) { + return new KafkaNode(node.host(), node.port(), node.idString()); + } + + private KafkaClusterInfo clusterInfo(DescribeClusterResult dcr) throws InterruptedException, ExecutionException { + var controller = kafkaNode(dcr.controller().get()); + var nodes = new ArrayList(); + for (var node : dcr.nodes().get()) { + nodes.add(kafkaNode(node)); + } + var aclOperations = dcr.authorizedOperations().get(); + + var aclOperationsStr = new StringBuilder(); + if (aclOperations != null) { + for (var operation : dcr.authorizedOperations().get()) { + if (aclOperationsStr.length() == 0) { + aclOperationsStr.append(", "); + } + aclOperationsStr.append(operation.name()); + } + } else { + aclOperationsStr = new StringBuilder("NONE"); + } + + return new KafkaClusterInfo( + dcr.clusterId().get(), + controller, + nodes, + aclOperationsStr.toString()); + } + + public List getTopics() throws InterruptedException, ExecutionException { + var res = new ArrayList(); + for (TopicListing tl : kafkaAdminClient.getTopics()) { + res.add(kafkaTopic(tl)); + } + return res; + } + + private KafkaTopic kafkaTopic(TopicListing tl) throws ExecutionException, InterruptedException { + var partitions = partitions(tl.name()); + return new 
KafkaTopic( + tl.name(), + tl.topicId().toString(), + partitions.size(), + tl.isInternal(), + getTopicMessageCount(tl.name(), partitions)); + } + + public long getTopicMessageCount(String topicName, Collection partitions) + throws ExecutionException, InterruptedException { + var maxPartitionOffsetMap = kafkaTopicClient.getPagePartitionOffset(topicName, partitions, Order.NEW_FIRST); + return maxPartitionOffsetMap.values().stream() + .reduce(Long::sum) + .orElse(0L); + } + + public Collection partitions(String topicName) throws ExecutionException, InterruptedException { + return kafkaTopicClient.partitions(topicName); + } + + public KafkaMessagePage getMessages(KafkaMessagesRequest request) throws ExecutionException, InterruptedException { + return kafkaTopicClient.getTopicMessages(request.getTopicName(), request.getOrder(), request.getPartitionOffset(), + request.getPageSize()); + } + + public void createMessage(KafkaMessageCreateRequest request) { + kafkaTopicClient.createMessage(request); + } + + public List getConsumerGroups() throws InterruptedException, ExecutionException { + List res = new ArrayList<>(); + for (ConsumerGroupDescription cgd : kafkaAdminClient.getConsumerGroups()) { + + var metadata = kafkaAdminClient.listConsumerGroupOffsets(cgd.groupId()) + .partitionsToOffsetAndMetadata().get(); + var members = cgd.members().stream() + .map(member -> new KafkaConsumerGroupMember( + member.consumerId(), + member.clientId(), + member.host(), + getPartitionAssignments(metadata, member))) + .collect(Collectors.toSet()); + + res.add(new KafkaConsumerGroup( + cgd.groupId(), + cgd.state().name(), + cgd.coordinator().host(), + cgd.coordinator().id(), + cgd.partitionAssignor(), + getTotalLag(members), + members)); + } + return res; + } + + private long getTotalLag(Set members) { + return members.stream() + .map(KafkaConsumerGroupMember::getPartitions) + .flatMap(Collection::stream) + .map(KafkaConsumerGroupMemberPartitionAssignment::getLag) + .reduce(Long::sum) + 
.orElse(0L); + } + + private Set getPartitionAssignments( + Map topicOffsetMap, MemberDescription member) { + var topicPartitions = member.assignment().topicPartitions(); + try (var consumer = createConsumer(topicPartitions, config)) { + var endOffsets = consumer.endOffsets(topicPartitions); + + return topicPartitions.stream() + .map(tp -> { + var topicOffset = Optional.ofNullable(topicOffsetMap.get(tp)) + .map(OffsetAndMetadata::offset) + .orElse(0L); + return new KafkaConsumerGroupMemberPartitionAssignment(tp.partition(), tp.topic(), + getLag(topicOffset, endOffsets.get(tp))); + }) + .collect(Collectors.toSet()); + } + } + + private long getLag(long topicOffset, long endOffset) { + return endOffset - topicOffset; + } + + public Map getOffset(KafkaOffsetRequest request) throws ExecutionException, InterruptedException { + return kafkaTopicClient.getPagePartitionOffset(request.getTopicName(), request.getRequestedPartitions(), + request.getOrder()); + } + + public KafkaAclInfo getAclInfo() throws InterruptedException, ExecutionException { + var clusterInfo = clusterInfo(kafkaAdminClient.getCluster()); + var entries = new ArrayList(); + //TODO: fix it after proper error message impl + try { + var acls = kafkaAdminClient.getAclInfo(); + for (var acl : acls) { + var entry = new KafkaAclEntry( + acl.entry().operation().name(), + acl.entry().principal(), + acl.entry().permissionType().name(), + acl.pattern().toString()); + entries.add(entry); + } + } catch (Exception e) { + // this mostly means that ALC controller is absent + } + return new KafkaAclInfo( + clusterInfo.getId(), + clusterInfo.getController().asFullNodeName(), + clusterInfo.getAclOperations(), + entries); + } + + public String toJson(Object o) { + String res; + try { + res = objectMapper.writeValueAsString(o); + } catch (JsonProcessingException ex) { + //FIXME: + res = ""; + } + return res; + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/Order.java 
b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/Order.java new file mode 100644 index 0000000000000..a94a5565c4a0f --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/Order.java @@ -0,0 +1,6 @@ +package io.quarkus.kafka.client.runtime.ui.model; + +public enum Order { + OLD_FIRST, + NEW_FIRST +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/converter/KafkaModelConverter.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/converter/KafkaModelConverter.java new file mode 100644 index 0000000000000..5eaad0173d129 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/converter/KafkaModelConverter.java @@ -0,0 +1,20 @@ +package io.quarkus.kafka.client.runtime.ui.model.converter; + +import java.util.Optional; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.common.utils.Bytes; + +import io.quarkus.kafka.client.runtime.ui.model.response.KafkaMessage; + +public class KafkaModelConverter { + public KafkaMessage convert(ConsumerRecord message) { + return new KafkaMessage( + message.topic(), + message.partition(), + message.offset(), + message.timestamp(), + Optional.ofNullable(message.key()).map(Bytes::toString).orElse(null), + Optional.ofNullable(message.value()).map(Bytes::toString).orElse(null)); + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/request/KafkaCreateTopicRequest.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/request/KafkaCreateTopicRequest.java new file mode 100644 index 0000000000000..8fbe12f9c2500 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/request/KafkaCreateTopicRequest.java @@ -0,0 +1,28 @@ +package 
io.quarkus.kafka.client.runtime.ui.model.request; + +public class KafkaCreateTopicRequest { + private String topicName; + private Integer partitions; + private Short replications; + + public KafkaCreateTopicRequest() { + } + + public KafkaCreateTopicRequest(String topicName, Integer partitions, Short replications) { + this.topicName = topicName; + this.partitions = partitions; + this.replications = replications; + } + + public String getTopicName() { + return topicName; + } + + public Integer getPartitions() { + return partitions; + } + + public Short getReplications() { + return replications; + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/request/KafkaMessageCreateRequest.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/request/KafkaMessageCreateRequest.java new file mode 100644 index 0000000000000..5dcbebb32fdaa --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/request/KafkaMessageCreateRequest.java @@ -0,0 +1,39 @@ +package io.quarkus.kafka.client.runtime.ui.model.request; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +@JsonIgnoreProperties("action") +public class KafkaMessageCreateRequest { + + //TODO: add headers + private String topic; + private Integer partition; + private String value; + private String key; + + public KafkaMessageCreateRequest() { + } + + public KafkaMessageCreateRequest(String topic, Integer partition, String value, String key) { + this.topic = topic; + this.partition = partition; + this.value = value; + this.key = key; + } + + public String getTopic() { + return topic; + } + + public Integer getPartition() { + return partition; + } + + public String getValue() { + return value; + } + + public String getKey() { + return key; + } +} diff --git 
a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/request/KafkaMessagesRequest.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/request/KafkaMessagesRequest.java new file mode 100644 index 0000000000000..71fda0e79d8a6 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/request/KafkaMessagesRequest.java @@ -0,0 +1,51 @@ +package io.quarkus.kafka.client.runtime.ui.model.request; + +import java.util.Map; + +import io.quarkus.kafka.client.runtime.ui.model.Order; + +public class KafkaMessagesRequest { + private String topicName; + private Order order; + private int pageSize; + private Integer pageNumber; + + private Map partitionOffset; + + public KafkaMessagesRequest() { + } + + public KafkaMessagesRequest(String topicName, Order order, int pageSize, int pageNumber) { + this.topicName = topicName; + this.order = order; + this.pageSize = pageSize; + this.pageNumber = pageNumber; + } + + public KafkaMessagesRequest(String topicName, Order order, int pageSize, Map partitionOffset) { + this.topicName = topicName; + this.order = order; + this.pageSize = pageSize; + this.partitionOffset = partitionOffset; + } + + public String getTopicName() { + return topicName; + } + + public Order getOrder() { + return order; + } + + public int getPageSize() { + return pageSize; + } + + public int getPageNumber() { + return pageNumber; + } + + public Map getPartitionOffset() { + return partitionOffset; + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/request/KafkaOffsetRequest.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/request/KafkaOffsetRequest.java new file mode 100644 index 0000000000000..f9fa52cdb7369 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/request/KafkaOffsetRequest.java @@ 
-0,0 +1,32 @@ +package io.quarkus.kafka.client.runtime.ui.model.request; + +import java.util.List; + +import io.quarkus.kafka.client.runtime.ui.model.Order; + +public class KafkaOffsetRequest { + private String topicName; + private List requestedPartitions; + private Order order; + + public KafkaOffsetRequest() { + } + + public KafkaOffsetRequest(String topicName, List requestedPartitions, Order order) { + this.topicName = topicName; + this.requestedPartitions = requestedPartitions; + this.order = order; + } + + public String getTopicName() { + return topicName; + } + + public List getRequestedPartitions() { + return requestedPartitions; + } + + public Order getOrder() { + return order; + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaAclEntry.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaAclEntry.java new file mode 100644 index 0000000000000..b32a0d729f6b7 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaAclEntry.java @@ -0,0 +1,34 @@ +package io.quarkus.kafka.client.runtime.ui.model.response; + +public class KafkaAclEntry { + private String operation; + private String principal; + private String perm; + private String pattern; + + public KafkaAclEntry() { + } + + public KafkaAclEntry(String operation, String principal, String perm, String pattern) { + this.operation = operation; + this.principal = principal; + this.perm = perm; + this.pattern = pattern; + } + + public String getOperation() { + return operation; + } + + public String getPrincipal() { + return principal; + } + + public String getPerm() { + return perm; + } + + public String getPattern() { + return pattern; + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaAclInfo.java 
b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaAclInfo.java new file mode 100644 index 0000000000000..4e53287f220b7 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaAclInfo.java @@ -0,0 +1,37 @@ +package io.quarkus.kafka.client.runtime.ui.model.response; + +import java.util.ArrayList; +import java.util.List; + +public class KafkaAclInfo { + private String clusterId; + private String broker; + private String aclOperations; + private List entries = new ArrayList<>(); + + public KafkaAclInfo() { + } + + public KafkaAclInfo(String clusterId, String broker, String aclOperations, List entries) { + this.clusterId = clusterId; + this.broker = broker; + this.aclOperations = aclOperations; + this.entries = entries; + } + + public String getClusterId() { + return clusterId; + } + + public String getBroker() { + return broker; + } + + public String getAclOperations() { + return aclOperations; + } + + public List getEntries() { + return entries; + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaClusterInfo.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaClusterInfo.java new file mode 100644 index 0000000000000..71e8e67c69b11 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaClusterInfo.java @@ -0,0 +1,37 @@ +package io.quarkus.kafka.client.runtime.ui.model.response; + +import java.util.ArrayList; +import java.util.List; + +public class KafkaClusterInfo { + private String id; + private KafkaNode controller; + private List nodes = new ArrayList<>(); + private String aclOperations; + + public KafkaClusterInfo() { + } + + public KafkaClusterInfo(String id, KafkaNode controller, List nodes, String aclOperations) { + this.id = id; + this.controller = 
controller; + this.nodes = nodes; + this.aclOperations = aclOperations; + } + + public String getId() { + return id; + } + + public KafkaNode getController() { + return controller; + } + + public List getNodes() { + return nodes; + } + + public String getAclOperations() { + return aclOperations; + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaConsumerGroup.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaConsumerGroup.java new file mode 100644 index 0000000000000..e6506837534eb --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaConsumerGroup.java @@ -0,0 +1,56 @@ +package io.quarkus.kafka.client.runtime.ui.model.response; + +import java.util.Collection; + +public class KafkaConsumerGroup { + private String name; + private String state; + private String coordinatorHost; + private int coordinatorId; + // The assignment strategy + private String protocol; + private long lag; + private Collection members; + + public KafkaConsumerGroup() { + } + + public KafkaConsumerGroup(String name, String state, String coordinatorHost, int coordinatorId, String protocol, long lag, + Collection members) { + this.name = name; + this.state = state; + this.coordinatorHost = coordinatorHost; + this.coordinatorId = coordinatorId; + this.protocol = protocol; + this.lag = lag; + this.members = members; + } + + public String getName() { + return name; + } + + public String getState() { + return state; + } + + public String getCoordinatorHost() { + return coordinatorHost; + } + + public int getCoordinatorId() { + return coordinatorId; + } + + public String getProtocol() { + return protocol; + } + + public long getLag() { + return lag; + } + + public Collection getMembers() { + return members; + } +} diff --git 
a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaConsumerGroupMember.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaConsumerGroupMember.java new file mode 100644 index 0000000000000..338890414b702 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaConsumerGroupMember.java @@ -0,0 +1,38 @@ +package io.quarkus.kafka.client.runtime.ui.model.response; + +import java.util.Collection; + +public class KafkaConsumerGroupMember { + private String memberId; + private String clientId; + private String host; + + private Collection partitions; + + public KafkaConsumerGroupMember() { + } + + public KafkaConsumerGroupMember(String memberId, String clientId, String host, + Collection partitions) { + this.memberId = memberId; + this.clientId = clientId; + this.host = host; + this.partitions = partitions; + } + + public String getMemberId() { + return memberId; + } + + public String getClientId() { + return clientId; + } + + public String getHost() { + return host; + } + + public Collection getPartitions() { + return partitions; + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaConsumerGroupMemberPartitionAssignment.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaConsumerGroupMemberPartitionAssignment.java new file mode 100644 index 0000000000000..4a722e76d6385 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaConsumerGroupMemberPartitionAssignment.java @@ -0,0 +1,29 @@ +package io.quarkus.kafka.client.runtime.ui.model.response; + +public class KafkaConsumerGroupMemberPartitionAssignment { + + private int partition; + private String topic; + private long lag; + + public 
KafkaConsumerGroupMemberPartitionAssignment() { + } + + public KafkaConsumerGroupMemberPartitionAssignment(int partition, String topic, long lag) { + this.partition = partition; + this.topic = topic; + this.lag = lag; + } + + public int getPartition() { + return partition; + } + + public String getTopic() { + return topic; + } + + public long getLag() { + return lag; + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaInfo.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaInfo.java new file mode 100644 index 0000000000000..f8a63d09638f5 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaInfo.java @@ -0,0 +1,37 @@ +package io.quarkus.kafka.client.runtime.ui.model.response; + +import java.util.List; + +public class KafkaInfo { + private String broker; + private KafkaClusterInfo clusterInfo; + private List topics; + private List consumerGroups; + + public KafkaInfo() { + } + + public KafkaInfo(String broker, KafkaClusterInfo clusterInfo, List topics, + List consumerGroups) { + this.broker = broker; + this.clusterInfo = clusterInfo; + this.topics = topics; + this.consumerGroups = consumerGroups; + } + + public String getBroker() { + return broker; + } + + public List getTopics() { + return topics; + } + + public KafkaClusterInfo getClusterInfo() { + return clusterInfo; + } + + public List getConsumerGroups() { + return consumerGroups; + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaMessage.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaMessage.java new file mode 100644 index 0000000000000..4b4e246994a94 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaMessage.java @@ -0,0 
+1,43 @@ +package io.quarkus.kafka.client.runtime.ui.model.response; + +public class KafkaMessage { + private final String topic; + private final int partition; + private final long offset; + private final long timestamp; + private final String key; + private final String value; + + public KafkaMessage(String topic, int partition, long offset, long timestamp, String key, String value) { + this.topic = topic; + this.partition = partition; + this.offset = offset; + this.timestamp = timestamp; + this.key = key; + this.value = value; + } + + public String getTopic() { + return topic; + } + + public int getPartition() { + return partition; + } + + public long getOffset() { + return offset; + } + + public long getTimestamp() { + return timestamp; + } + + public String getKey() { + return key; + } + + public String getValue() { + return value; + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaMessagePage.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaMessagePage.java new file mode 100644 index 0000000000000..c57aaa6ce5178 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaMessagePage.java @@ -0,0 +1,22 @@ +package io.quarkus.kafka.client.runtime.ui.model.response; + +import java.util.Collection; +import java.util.Map; + +public class KafkaMessagePage { + private final Map nextOffsets; + private final Collection messages; + + public KafkaMessagePage(Map nextOffsets, Collection messages) { + this.nextOffsets = nextOffsets; + this.messages = messages; + } + + public Map getNextOffsets() { + return nextOffsets; + } + + public Collection getMessages() { + return messages; + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaNode.java 
b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaNode.java new file mode 100644 index 0000000000000..137645a7c29ee --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaNode.java @@ -0,0 +1,32 @@ +package io.quarkus.kafka.client.runtime.ui.model.response; + +public class KafkaNode { + private String host; + private int port; + private String id; + + public KafkaNode() { + } + + public KafkaNode(String host, int port, String id) { + this.host = host; + this.port = port; + this.id = id; + } + + public String getHost() { + return host; + } + + public int getPort() { + return port; + } + + public String getId() { + return id; + } + + public String asFullNodeName() { + return host + ":" + port; + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaTopic.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaTopic.java new file mode 100644 index 0000000000000..ab5595d7a8488 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/model/response/KafkaTopic.java @@ -0,0 +1,46 @@ +package io.quarkus.kafka.client.runtime.ui.model.response; + +public class KafkaTopic { + private String name; + private String topicId; + private int partitionsCount; + private boolean internal; + private long nmsg = 0; + + public KafkaTopic() { + } + + public KafkaTopic(String name, String topicId, int partitionsCount, boolean internal, long nmsg) { + this.name = name; + this.topicId = topicId; + this.partitionsCount = partitionsCount; + this.internal = internal; + this.nmsg = nmsg; + } + + public String getName() { + return name; + } + + public String getTopicId() { + return topicId; + } + + public int getPartitionsCount() { + return partitionsCount; + } + + public boolean isInternal() { + return internal; + } + + 
public long getNmsg() { + return nmsg; + } + + public String toString() { + StringBuilder sb = new StringBuilder(name); + sb.append(" : ").append(topicId); + return sb.toString(); + } +} diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/util/ConsumerFactory.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/util/ConsumerFactory.java new file mode 100644 index 0000000000000..be2c140530860 --- /dev/null +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/ui/util/ConsumerFactory.java @@ -0,0 +1,37 @@ +package io.quarkus.kafka.client.runtime.ui.util; + +import java.util.*; + +import org.apache.kafka.clients.consumer.Consumer; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.serialization.BytesDeserializer; +import org.apache.kafka.common.utils.Bytes; + +public class ConsumerFactory { + + public static Consumer createConsumer(String topicName, Integer requestedPartition, + Map commonConfig) { + return createConsumer(List.of(new TopicPartition(topicName, requestedPartition)), commonConfig); + } + + // We must create a new instance per request, as we might have multiple windows open, each with different pagination, filter and thus different cursor. + public static Consumer createConsumer(Collection requestedPartitions, + Map commonConfig) { + Map config = new HashMap<>(commonConfig); + //TODO: make generic? + config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class); + config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class); + + config.put(ConsumerConfig.CLIENT_ID_CONFIG, "kafka-ui-" + UUID.randomUUID()); + + // For pagination, we require manual management of offset pointer. 
+ config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); + + var consumer = new KafkaConsumer(config); + consumer.assign(requestedPartitions); + return consumer; + } + +} diff --git a/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/MongoClientProcessor.java b/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/MongoClientProcessor.java index 2a0c390f6a944..d1100b65e1bc7 100644 --- a/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/MongoClientProcessor.java +++ b/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/MongoClientProcessor.java @@ -69,6 +69,7 @@ import io.quarkus.mongodb.runtime.dns.MongoDnsClientProvider; import io.quarkus.runtime.metrics.MetricsFactory; import io.quarkus.smallrye.health.deployment.spi.HealthBuildItem; +import io.quarkus.vertx.deployment.VertxBuildItem; public class MongoClientProcessor { private static final String MONGODB_TRACING_COMMANDLISTENER_CLASSNAME = "io.quarkus.mongodb.tracing.MongoTracingCommandListener"; @@ -281,7 +282,8 @@ void generateClientBeans(MongoClientRecorder recorder, MongoClientBuildTimeConfig mongoClientBuildTimeConfig, MongodbConfig mongodbConfig, List mongoUnremovableClientsBuildItem, - BuildProducer syntheticBeanBuildItemBuildProducer) { + BuildProducer syntheticBeanBuildItemBuildProducer, + VertxBuildItem vertxBuildItem) { boolean makeUnremovable = !mongoUnremovableClientsBuildItem.isEmpty(); @@ -328,6 +330,8 @@ void generateClientBeans(MongoClientRecorder recorder, .produce(createReactiveSyntheticBean(recorder, mongodbConfig, makeUnremovable, mongoClientName.getName(), mongoClientName.isAddQualifier())); } + + recorder.performInitialization(mongodbConfig, vertxBuildItem.getVertx()); } private SyntheticBeanBuildItem createBlockingSyntheticBean(MongoClientRecorder recorder, MongodbConfig mongodbConfig, diff --git 
a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClientRecorder.java b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClientRecorder.java index ada28f77e6b05..d5a6b29c418a1 100644 --- a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClientRecorder.java +++ b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClientRecorder.java @@ -9,6 +9,7 @@ import javax.enterprise.inject.literal.NamedLiteral; import javax.enterprise.util.AnnotationLiteral; +import com.mongodb.ConnectionString; import com.mongodb.client.MongoClient; import com.mongodb.event.ConnectionPoolListener; @@ -16,8 +17,10 @@ import io.quarkus.mongodb.metrics.MicrometerConnectionPoolListener; import io.quarkus.mongodb.metrics.MongoMetricsConnectionPoolListener; import io.quarkus.mongodb.reactive.ReactiveMongoClient; +import io.quarkus.mongodb.runtime.dns.MongoDnsClientProvider; import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.annotations.Recorder; +import io.vertx.core.Vertx; @Recorder public class MongoClientRecorder { @@ -104,4 +107,25 @@ public ConnectionPoolListener get() { } }; } + + /** + * We need to perform some initialization work on the main thread to ensure that reactive operations (such as DNS + * resolution) + * don't end up being performed on the event loop + */ + public void performInitialization(MongodbConfig config, RuntimeValue vertx) { + MongoDnsClientProvider.vertx = vertx.getValue(); + initializeDNSLookup(config.defaultMongoClientConfig); + for (MongoClientConfig mongoClientConfig : config.mongoClientConfigs.values()) { + initializeDNSLookup(mongoClientConfig); + } + } + + private void initializeDNSLookup(MongoClientConfig mongoClientConfig) { + if (mongoClientConfig.connectionString.isEmpty()) { + return; + } + // this ensures that DNS resolution will take place if necessary + new ConnectionString(mongoClientConfig.connectionString.get()); + 
} } diff --git a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/dns/MongoDnsClient.java b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/dns/MongoDnsClient.java index 4f89e8f8d1e13..e12c70dc854b9 100644 --- a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/dns/MongoDnsClient.java +++ b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/dns/MongoDnsClient.java @@ -9,6 +9,9 @@ import java.time.Duration; import java.util.ArrayList; import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -20,7 +23,6 @@ import com.mongodb.spi.dns.DnsClient; import com.mongodb.spi.dns.DnsException; -import io.quarkus.arc.Arc; import io.quarkus.mongodb.runtime.MongodbConfig; import io.quarkus.runtime.annotations.RegisterForReflection; import io.vertx.core.dns.DnsClientOptions; @@ -46,8 +48,14 @@ public class MongoDnsClient implements DnsClient { private final io.vertx.mutiny.core.dns.DnsClient dnsClient; - MongoDnsClient() { - Vertx vertx = Arc.container().instance(Vertx.class).get(); + // the static fields are used in order to hold DNS resolution result that has been performed on the main thread + // at application startup + // the reason we need this is to ensure that no blocking of event loop threads will occur due to DNS resolution + private static final Map> SRV_CACHE = new ConcurrentHashMap<>(); + private static final Map> TXT_CACHE = new ConcurrentHashMap<>(); + + MongoDnsClient(io.vertx.core.Vertx vertx) { + Vertx mutinyVertx = new io.vertx.mutiny.core.Vertx(vertx); boolean activity = config.getOptionalValue(DNS_LOG_ACTIVITY, Boolean.class).orElse(false); @@ -69,7 +77,7 @@ public class MongoDnsClient implements DnsClient { .setHost(server) .setPort(port); } - dnsClient = vertx.createDnsClient(dnsClientOptions); + dnsClient = 
mutinyVertx.createDnsClient(dnsClientOptions); } private static List nameServers() { @@ -118,7 +126,17 @@ private List resolveSrvRequest(final String srvHost) { .orElse(Duration.ofSeconds(5)); try { - List srvRecords = dnsClient.resolveSRV(srvHost).await().atMost(timeout); + List srvRecords; + if (SRV_CACHE.containsKey(srvHost)) { + srvRecords = SRV_CACHE.get(srvHost); + } else { + srvRecords = dnsClient.resolveSRV(srvHost).invoke(new Consumer<>() { + @Override + public void accept(List srvRecords) { + SRV_CACHE.put(srvHost, srvRecords); + } + }).await().atMost(timeout); + } if (srvRecords.isEmpty()) { throw new MongoConfigurationException("No SRV records available for host " + srvHost); @@ -143,11 +161,18 @@ private List resolveSrvRequest(final String srvHost) { * Here we concatenate TXT records together with a '&' separator as required by connection strings */ public List resolveTxtRequest(final String host) { + if (TXT_CACHE.containsKey(host)) { + return TXT_CACHE.get(host); + } try { Duration timeout = config.getOptionalValue(DNS_LOOKUP_TIMEOUT, Duration.class) .orElse(Duration.ofSeconds(5)); - - return dnsClient.resolveTXT(host).await().atMost(timeout); + return dnsClient.resolveTXT(host).invoke(new Consumer<>() { + @Override + public void accept(List strings) { + TXT_CACHE.put(host, strings); + } + }).await().atMost(timeout); } catch (Throwable e) { throw new MongoConfigurationException("Unable to look up TXT record for host " + host, e); } diff --git a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/dns/MongoDnsClientProvider.java b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/dns/MongoDnsClientProvider.java index 36c2113d4b34e..c2ce1db24d8b4 100644 --- a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/dns/MongoDnsClientProvider.java +++ b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/dns/MongoDnsClientProvider.java @@ -4,11 +4,15 @@ import 
com.mongodb.spi.dns.DnsClientProvider; import io.quarkus.runtime.annotations.RegisterForReflection; +import io.vertx.core.Vertx; @RegisterForReflection public class MongoDnsClientProvider implements DnsClientProvider { + + public static volatile Vertx vertx; + @Override public DnsClient create() { - return new MongoDnsClient(); + return new MongoDnsClient(vertx); } } diff --git a/extensions/narayana-jta/deployment/src/main/java/io/quarkus/narayana/jta/deployment/NarayanaInitBuildItem.java b/extensions/narayana-jta/deployment/src/main/java/io/quarkus/narayana/jta/deployment/NarayanaInitBuildItem.java new file mode 100644 index 0000000000000..5cbee9c7ce40d --- /dev/null +++ b/extensions/narayana-jta/deployment/src/main/java/io/quarkus/narayana/jta/deployment/NarayanaInitBuildItem.java @@ -0,0 +1,9 @@ +package io.quarkus.narayana.jta.deployment; + +import io.quarkus.builder.item.EmptyBuildItem; + +/** + * Marker build item that indicates that the Narayana JTA extension has been initialized. 
+ */ +public final class NarayanaInitBuildItem extends EmptyBuildItem { +} diff --git a/extensions/narayana-jta/deployment/src/main/java/io/quarkus/narayana/jta/deployment/NarayanaJtaProcessor.java b/extensions/narayana-jta/deployment/src/main/java/io/quarkus/narayana/jta/deployment/NarayanaJtaProcessor.java index 397f771bdfc79..18e88ec312178 100644 --- a/extensions/narayana-jta/deployment/src/main/java/io/quarkus/narayana/jta/deployment/NarayanaJtaProcessor.java +++ b/extensions/narayana-jta/deployment/src/main/java/io/quarkus/narayana/jta/deployment/NarayanaJtaProcessor.java @@ -10,11 +10,20 @@ import javax.transaction.TransactionScoped; import com.arjuna.ats.arjuna.common.ObjectStoreEnvironmentBean; +import com.arjuna.ats.arjuna.recovery.TransactionStatusConnectionManager; import com.arjuna.ats.internal.arjuna.coordinator.CheckedActionFactoryImple; import com.arjuna.ats.internal.arjuna.objectstore.ShadowNoFileLockStore; +import com.arjuna.ats.internal.arjuna.recovery.AtomicActionExpiryScanner; +import com.arjuna.ats.internal.arjuna.recovery.AtomicActionRecoveryModule; +import com.arjuna.ats.internal.arjuna.recovery.ExpiredTransactionStatusManagerScanner; import com.arjuna.ats.internal.arjuna.utils.SocketProcessId; import com.arjuna.ats.internal.jta.recovery.arjunacore.CommitMarkableResourceRecordRecoveryModule; +import com.arjuna.ats.internal.jta.recovery.arjunacore.JTAActionStatusServiceXAResourceOrphanFilter; +import com.arjuna.ats.internal.jta.recovery.arjunacore.JTANodeNameXAResourceOrphanFilter; +import com.arjuna.ats.internal.jta.recovery.arjunacore.JTATransactionLogXAResourceOrphanFilter; import com.arjuna.ats.internal.jta.recovery.arjunacore.RecoverConnectableAtomicAction; +import com.arjuna.ats.internal.jta.recovery.arjunacore.XARecoveryModule; +import com.arjuna.ats.internal.jta.resources.arjunacore.XAResourceRecord; import com.arjuna.ats.internal.jta.transaction.arjunacore.TransactionManagerImple; import 
com.arjuna.ats.internal.jta.transaction.arjunacore.TransactionSynchronizationRegistryImple; import com.arjuna.ats.internal.jta.transaction.arjunacore.UserTransactionImple; @@ -32,6 +41,7 @@ import io.quarkus.deployment.IsTest; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.annotations.Produce; import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.FeatureBuildItem; import io.quarkus.deployment.builditem.ShutdownContextBuildItem; @@ -64,6 +74,7 @@ public NativeImageSystemPropertyBuildItem nativeImageSystemPropertyBuildItem() { @BuildStep @Record(RUNTIME_INIT) + @Produce(NarayanaInitBuildItem.class) public void build(NarayanaJtaRecorder recorder, BuildProducer additionalBeans, BuildProducer reflectiveClass, @@ -81,6 +92,9 @@ public void build(NarayanaJtaRecorder recorder, runtimeInit.produce(new RuntimeInitializedClassBuildItem(SocketProcessId.class.getName())); runtimeInit.produce(new RuntimeInitializedClassBuildItem(CommitMarkableResourceRecordRecoveryModule.class.getName())); runtimeInit.produce(new RuntimeInitializedClassBuildItem(RecoverConnectableAtomicAction.class.getName())); + runtimeInit.produce(new RuntimeInitializedClassBuildItem(TransactionStatusConnectionManager.class.getName())); + runtimeInit.produce(new RuntimeInitializedClassBuildItem(JTAActionStatusServiceXAResourceOrphanFilter.class.getName())); + runtimeInit.produce(new RuntimeInitializedClassBuildItem(AtomicActionExpiryScanner.class.getName())); reflectiveClass.produce(new ReflectiveClassBuildItem(false, false, JTAEnvironmentBean.class.getName(), UserTransactionImple.class.getName(), @@ -89,7 +103,14 @@ public void build(NarayanaJtaRecorder recorder, TransactionSynchronizationRegistryImple.class.getName(), ObjectStoreEnvironmentBean.class.getName(), ShadowNoFileLockStore.class.getName(), - SocketProcessId.class.getName())); + SocketProcessId.class.getName(), + 
AtomicActionRecoveryModule.class.getName(), + XARecoveryModule.class.getName(), + XAResourceRecord.class.getName(), + JTATransactionLogXAResourceOrphanFilter.class.getName(), + JTANodeNameXAResourceOrphanFilter.class.getName(), + JTAActionStatusServiceXAResourceOrphanFilter.class.getName(), + ExpiredTransactionStatusManagerScanner.class.getName())); AdditionalBeanBuildItem.Builder builder = AdditionalBeanBuildItem.builder(); builder.addBeanClass(TransactionalInterceptorSupports.class); diff --git a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaProducers.java b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaProducers.java index 85e4b99d2e17e..abfb7d0d36f97 100644 --- a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaProducers.java +++ b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaProducers.java @@ -3,6 +3,7 @@ import javax.enterprise.context.ApplicationScoped; import javax.enterprise.context.Dependent; import javax.enterprise.inject.Produces; +import javax.inject.Singleton; import javax.transaction.TransactionSynchronizationRegistry; import org.jboss.tm.JBossXATerminator; @@ -32,9 +33,12 @@ public javax.transaction.UserTransaction userTransaction() { } @Produces - @ApplicationScoped + @Singleton public XAResourceRecoveryRegistry xaResourceRecoveryRegistry() { - return new RecoveryManagerService(); + RecoveryManagerService recoveryManagerService = new RecoveryManagerService(); + recoveryManagerService.create(); + recoveryManagerService.start(); + return recoveryManagerService; } @Produces diff --git a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaRecorder.java b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaRecorder.java index b66dd67487d37..82d2e61b95ec4 100644 --- 
a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaRecorder.java +++ b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaRecorder.java @@ -7,10 +7,15 @@ import org.jboss.logging.Logger; import com.arjuna.ats.arjuna.common.CoreEnvironmentBeanException; +import com.arjuna.ats.arjuna.common.ObjectStoreEnvironmentBean; +import com.arjuna.ats.arjuna.common.RecoveryEnvironmentBean; import com.arjuna.ats.arjuna.common.arjPropertyManager; import com.arjuna.ats.arjuna.coordinator.TransactionReaper; import com.arjuna.ats.arjuna.coordinator.TxControl; +import com.arjuna.ats.arjuna.recovery.RecoveryManager; +import com.arjuna.ats.jta.common.JTAEnvironmentBean; import com.arjuna.ats.jta.common.jtaPropertyManager; +import com.arjuna.common.internal.util.propertyservice.BeanPopulator; import com.arjuna.common.util.propertyservice.PropertiesFactory; import io.quarkus.runtime.ShutdownContext; @@ -67,13 +72,25 @@ public void disableTransactionStatusManager() { } public void setConfig(final TransactionManagerConfiguration transactions) { - arjPropertyManager.getObjectStoreEnvironmentBean().setObjectStoreDir(transactions.objectStoreDirectory); + BeanPopulator.getDefaultInstance(ObjectStoreEnvironmentBean.class) + .setObjectStoreDir(transactions.objectStoreDirectory); + BeanPopulator.getNamedInstance(ObjectStoreEnvironmentBean.class, "communicationStore") + .setObjectStoreDir(transactions.objectStoreDirectory); + BeanPopulator.getNamedInstance(ObjectStoreEnvironmentBean.class, "stateStore") + .setObjectStoreDir(transactions.objectStoreDirectory); + BeanPopulator.getDefaultInstance(RecoveryEnvironmentBean.class) + .setRecoveryModuleClassNames(transactions.recoveryModules); + BeanPopulator.getDefaultInstance(RecoveryEnvironmentBean.class) + .setExpiryScannerClassNames(transactions.expiryScanners); + BeanPopulator.getDefaultInstance(JTAEnvironmentBean.class) + 
.setXaResourceOrphanFilterClassNames(transactions.xaResourceOrphanFilters); } public void handleShutdown(ShutdownContext context) { context.addLastShutdownTask(new Runnable() { @Override public void run() { + RecoveryManager.manager().terminate(true); TransactionReaper.terminate(false); } }); diff --git a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/TransactionManagerConfiguration.java b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/TransactionManagerConfiguration.java index ee2c70841b308..b4f8ab1ee8cc1 100644 --- a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/TransactionManagerConfiguration.java +++ b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/TransactionManagerConfiguration.java @@ -1,6 +1,7 @@ package io.quarkus.narayana.jta.runtime; import java.time.Duration; +import java.util.List; import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; @@ -30,4 +31,25 @@ public final class TransactionManagerConfiguration { */ @ConfigItem(defaultValue = "ObjectStore") public String objectStoreDirectory; + + /** + * The list of recovery modules + */ + @ConfigItem(defaultValue = "com.arjuna.ats.internal.arjuna.recovery.AtomicActionRecoveryModule," + + "com.arjuna.ats.internal.jta.recovery.arjunacore.XARecoveryModule") + public List recoveryModules; + + /** + * The list of expiry scanners + */ + @ConfigItem(defaultValue = "com.arjuna.ats.internal.arjuna.recovery.ExpiredTransactionStatusManagerScanner") + public List expiryScanners; + + /** + * The list of orphan filters + */ + @ConfigItem(defaultValue = "com.arjuna.ats.internal.jta.recovery.arjunacore.JTATransactionLogXAResourceOrphanFilter," + + "com.arjuna.ats.internal.jta.recovery.arjunacore.JTANodeNameXAResourceOrphanFilter," + + "com.arjuna.ats.internal.jta.recovery.arjunacore.JTAActionStatusServiceXAResourceOrphanFilter") + public List 
xaResourceOrphanFilters; } diff --git a/extensions/resteasy-classic/rest-client-config/runtime/src/main/java/io/quarkus/restclient/config/RestClientBuilderFactory.java b/extensions/resteasy-classic/rest-client-config/runtime/src/main/java/io/quarkus/restclient/config/RestClientBuilderFactory.java new file mode 100644 index 0000000000000..3e9222f6f4106 --- /dev/null +++ b/extensions/resteasy-classic/rest-client-config/runtime/src/main/java/io/quarkus/restclient/config/RestClientBuilderFactory.java @@ -0,0 +1,34 @@ +package io.quarkus.restclient.config; + +import java.util.ServiceLoader; + +import org.eclipse.microprofile.rest.client.RestClientBuilder; + +/** + * Factory which creates MicroProfile RestClientBuilder instance configured according to current Quarkus application + * configuration. + * + * The builder instance can be further tweaked, if needed, before building the rest client proxy. + */ +public interface RestClientBuilderFactory { + + default RestClientBuilder newBuilder(Class proxyType) { + return newBuilder(proxyType, RestClientsConfig.getInstance()); + } + + RestClientBuilder newBuilder(Class proxyType, RestClientsConfig restClientsConfigRoot); + + static RestClientBuilderFactory getInstance() { + ServiceLoader sl = ServiceLoader.load(RestClientBuilderFactory.class); + RestClientBuilderFactory instance = null; + for (RestClientBuilderFactory spi : sl) { + if (instance != null) { + throw new IllegalStateException("Multiple RestClientBuilderFactory implementations found: " + + spi.getClass().getName() + " and " + + instance.getClass().getName()); + } + instance = spi; + } + return instance; + } +} diff --git a/extensions/resteasy-classic/rest-client-config/runtime/src/main/java/io/quarkus/restclient/config/RestClientsConfig.java b/extensions/resteasy-classic/rest-client-config/runtime/src/main/java/io/quarkus/restclient/config/RestClientsConfig.java index e13cb182e59da..99f4c9c117f9e 100644 --- 
a/extensions/resteasy-classic/rest-client-config/runtime/src/main/java/io/quarkus/restclient/config/RestClientsConfig.java +++ b/extensions/resteasy-classic/rest-client-config/runtime/src/main/java/io/quarkus/restclient/config/RestClientsConfig.java @@ -65,8 +65,6 @@ public class RestClientsConfig { * (or IP address) and port for requests of clients to use. * * Can be overwritten by client-specific settings. - * - * This property is applicable to reactive REST clients only. */ @ConfigItem public Optional proxyAddress; diff --git a/extensions/resteasy-classic/rest-client/deployment/src/test/java/io/quarkus/restclient/configuration/ClassicRestClientBuilderFactoryTest.java b/extensions/resteasy-classic/rest-client/deployment/src/test/java/io/quarkus/restclient/configuration/ClassicRestClientBuilderFactoryTest.java new file mode 100644 index 0000000000000..03d9485ea7dba --- /dev/null +++ b/extensions/resteasy-classic/rest-client/deployment/src/test/java/io/quarkus/restclient/configuration/ClassicRestClientBuilderFactoryTest.java @@ -0,0 +1,36 @@ +package io.quarkus.restclient.configuration; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.eclipse.microprofile.rest.client.RestClientBuilder; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.restclient.config.RestClientBuilderFactory; +import io.quarkus.test.QuarkusUnitTest; + +public class ClassicRestClientBuilderFactoryTest { + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar.addClasses(EchoClientWithoutAnnotation.class, EchoClientWithConfigKey.class, + EchoResource.class)) + .withConfigurationResource("factory-test-application.properties"); + + @Test + public void testAnnotatedClientClass() { + RestClientBuilder restClientBuilder = RestClientBuilderFactory.getInstance().newBuilder(EchoClientWithConfigKey.class); + EchoClientWithConfigKey restClient = 
restClientBuilder.build(EchoClientWithConfigKey.class); + + assertThat(restClient.echo("Hello")).contains("Hello"); + } + + @Test + public void testNotAnnotatedClientClass() { + RestClientBuilder restClientBuilder = RestClientBuilderFactory.getInstance() + .newBuilder(EchoClientWithoutAnnotation.class); + EchoClientWithoutAnnotation restClient = restClientBuilder.build(EchoClientWithoutAnnotation.class); + + assertThat(restClient.echo("Hello")).contains("Hello"); + } +} diff --git a/extensions/resteasy-classic/rest-client/deployment/src/test/java/io/quarkus/restclient/configuration/EchoClientWithoutAnnotation.java b/extensions/resteasy-classic/rest-client/deployment/src/test/java/io/quarkus/restclient/configuration/EchoClientWithoutAnnotation.java new file mode 100644 index 0000000000000..f27d86054b4f6 --- /dev/null +++ b/extensions/resteasy-classic/rest-client/deployment/src/test/java/io/quarkus/restclient/configuration/EchoClientWithoutAnnotation.java @@ -0,0 +1,18 @@ +package io.quarkus.restclient.configuration; + +import javax.ws.rs.Consumes; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.MediaType; + +@Path("/echo") +public interface EchoClientWithoutAnnotation { + + @GET + @Produces(MediaType.TEXT_PLAIN) + @Consumes(MediaType.TEXT_PLAIN) + String echo(@QueryParam("message") String message); + +} diff --git a/extensions/resteasy-classic/rest-client/deployment/src/test/resources/factory-test-application.properties b/extensions/resteasy-classic/rest-client/deployment/src/test/resources/factory-test-application.properties new file mode 100644 index 0000000000000..da5f5178a2e45 --- /dev/null +++ b/extensions/resteasy-classic/rest-client/deployment/src/test/resources/factory-test-application.properties @@ -0,0 +1,3 @@ +quarkus.rest-client.echo-client.url=http://localhost:8081 +quarkus.rest-client.EchoClientWithoutAnnotation.url=http://localhost:8081 
+quarkus.rest-client.read-timeout=3456 diff --git a/extensions/resteasy-classic/rest-client/runtime/src/main/java/io/quarkus/restclient/runtime/ClassicRestClientBuilderFactory.java b/extensions/resteasy-classic/rest-client/runtime/src/main/java/io/quarkus/restclient/runtime/ClassicRestClientBuilderFactory.java new file mode 100644 index 0000000000000..f8f51859f6fce --- /dev/null +++ b/extensions/resteasy-classic/rest-client/runtime/src/main/java/io/quarkus/restclient/runtime/ClassicRestClientBuilderFactory.java @@ -0,0 +1,27 @@ +package io.quarkus.restclient.runtime; + +import org.eclipse.microprofile.rest.client.RestClientBuilder; +import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; + +import io.quarkus.restclient.config.RestClientBuilderFactory; +import io.quarkus.restclient.config.RestClientsConfig; + +public class ClassicRestClientBuilderFactory implements RestClientBuilderFactory { + + public RestClientBuilder newBuilder(Class proxyType, RestClientsConfig restClientsConfigRoot) { + RegisterRestClient annotation = proxyType.getAnnotation(RegisterRestClient.class); + String configKey = null; + String baseUri = null; + if (annotation != null) { + configKey = annotation.configKey(); + baseUri = annotation.baseUri(); + } + + RestClientBuilder restClientBuilder = RestClientBuilder.newBuilder(); + RestClientBase restClientBase = new RestClientBase(proxyType, baseUri, configKey, new Class[0], restClientsConfigRoot); + restClientBase.configureBuilder(restClientBuilder); + + return restClientBuilder; + } + +} diff --git a/extensions/resteasy-classic/rest-client/runtime/src/main/java/io/quarkus/restclient/runtime/RestClientBase.java b/extensions/resteasy-classic/rest-client/runtime/src/main/java/io/quarkus/restclient/runtime/RestClientBase.java index fab97f03429f5..4380c9ef7b333 100644 --- a/extensions/resteasy-classic/rest-client/runtime/src/main/java/io/quarkus/restclient/runtime/RestClientBase.java +++ 
b/extensions/resteasy-classic/rest-client/runtime/src/main/java/io/quarkus/restclient/runtime/RestClientBase.java @@ -66,7 +66,7 @@ public Object create() { return builder.build(proxyType); } - void configureBuilder(RestClientBuilder builder) { + protected void configureBuilder(RestClientBuilder builder) { configureBaseUrl(builder); configureTimeouts(builder); configureProviders(builder); @@ -77,7 +77,7 @@ void configureBuilder(RestClientBuilder builder) { configureCustomProperties(builder); } - private void configureCustomProperties(RestClientBuilder builder) { + protected void configureCustomProperties(RestClientBuilder builder) { Optional connectionPoolSize = oneOf(clientConfigByClassName().connectionPoolSize, clientConfigByConfigKey().connectionPoolSize, configRoot.connectionPoolSize); if (connectionPoolSize.isPresent()) { @@ -92,7 +92,7 @@ private void configureCustomProperties(RestClientBuilder builder) { } } - private void configureProxy(RestClientBuilder builder) { + protected void configureProxy(RestClientBuilder builder) { Optional proxyAddress = oneOf(clientConfigByClassName().proxyAddress, clientConfigByConfigKey().proxyAddress, configRoot.proxyAddress); if (proxyAddress.isPresent() && !NONE.equals(proxyAddress.get())) { @@ -116,7 +116,7 @@ private void configureProxy(RestClientBuilder builder) { } } - private void configureRedirects(RestClientBuilder builder) { + protected void configureRedirects(RestClientBuilder builder) { Optional followRedirects = oneOf(clientConfigByClassName().followRedirects, clientConfigByConfigKey().followRedirects, configRoot.followRedirects); if (followRedirects.isPresent()) { @@ -124,7 +124,7 @@ private void configureRedirects(RestClientBuilder builder) { } } - private void configureQueryParamStyle(RestClientBuilder builder) { + protected void configureQueryParamStyle(RestClientBuilder builder) { Optional queryParamStyle = oneOf(clientConfigByClassName().queryParamStyle, clientConfigByConfigKey().queryParamStyle, 
configRoot.queryParamStyle); if (queryParamStyle.isPresent()) { @@ -132,7 +132,7 @@ private void configureQueryParamStyle(RestClientBuilder builder) { } } - private void configureSsl(RestClientBuilder builder) { + protected void configureSsl(RestClientBuilder builder) { Optional trustStore = oneOf(clientConfigByClassName().trustStore, clientConfigByConfigKey().trustStore, configRoot.trustStore); if (trustStore.isPresent() && !trustStore.get().isBlank() && !NONE.equals(trustStore.get())) { @@ -249,7 +249,7 @@ private InputStream locateStream(String path) throws FileNotFoundException { } } - private void configureProviders(RestClientBuilder builder) { + protected void configureProviders(RestClientBuilder builder) { Optional providers = oneOf(clientConfigByClassName().providers, clientConfigByConfigKey().providers, configRoot.providers); @@ -277,7 +277,7 @@ private Class providerClassForName(String name) { } } - private void configureTimeouts(RestClientBuilder builder) { + protected void configureTimeouts(RestClientBuilder builder) { Long connectTimeout = oneOf(clientConfigByClassName().connectTimeout, clientConfigByConfigKey().connectTimeout).orElse(this.configRoot.connectTimeout); if (connectTimeout != null) { @@ -291,7 +291,7 @@ private void configureTimeouts(RestClientBuilder builder) { } } - private void configureBaseUrl(RestClientBuilder builder) { + protected void configureBaseUrl(RestClientBuilder builder) { Optional baseUrlOptional = oneOf(clientConfigByClassName().uri, clientConfigByConfigKey().uri); if (baseUrlOptional.isEmpty()) { baseUrlOptional = oneOf(clientConfigByClassName().url, clientConfigByConfigKey().url); @@ -333,7 +333,7 @@ private RestClientConfig clientConfigByClassName() { @SafeVarargs private static Optional oneOf(Optional... 
optionals) { for (Optional o : optionals) { - if (o.isPresent()) { + if (o != null && o.isPresent()) { return o; } } diff --git a/extensions/resteasy-classic/rest-client/runtime/src/main/resources/META-INF/services/io.quarkus.restclient.config.RestClientBuilderFactory b/extensions/resteasy-classic/rest-client/runtime/src/main/resources/META-INF/services/io.quarkus.restclient.config.RestClientBuilderFactory new file mode 100644 index 0000000000000..0f3850a8499fb --- /dev/null +++ b/extensions/resteasy-classic/rest-client/runtime/src/main/resources/META-INF/services/io.quarkus.restclient.config.RestClientBuilderFactory @@ -0,0 +1 @@ +io.quarkus.restclient.runtime.ClassicRestClientBuilderFactory \ No newline at end of file diff --git a/extensions/resteasy-reactive/quarkus-resteasy-reactive/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/providers/ParamConverterTest.java b/extensions/resteasy-reactive/quarkus-resteasy-reactive/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/providers/ParamConverterTest.java index e1f5c8fa6d979..c0c2447d651c7 100644 --- a/extensions/resteasy-reactive/quarkus-resteasy-reactive/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/providers/ParamConverterTest.java +++ b/extensions/resteasy-reactive/quarkus-resteasy-reactive/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/providers/ParamConverterTest.java @@ -73,11 +73,11 @@ public String set(@QueryParam("id") Set uuids) { @Path("list") @GET - public String list(@QueryParam("id") List uuids) { + public String list(@QueryParam("id") List uuids) { return join(uuids.stream()); } - private static String join(Stream uuids) { + private static String join(Stream uuids) { return uuids.map(UUID::toString).collect(Collectors.joining(",")); } } diff --git a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/ReactiveRestClientBuilderFactoryTest.java 
b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/ReactiveRestClientBuilderFactoryTest.java new file mode 100644 index 0000000000000..f8546ea1c32d5 --- /dev/null +++ b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/java/io/quarkus/rest/client/reactive/ReactiveRestClientBuilderFactoryTest.java @@ -0,0 +1,29 @@ +package io.quarkus.rest.client.reactive; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.eclipse.microprofile.rest.client.RestClientBuilder; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.rest.client.reactive.configuration.EchoResource; +import io.quarkus.restclient.config.RestClientBuilderFactory; +import io.quarkus.test.QuarkusUnitTest; + +public class ReactiveRestClientBuilderFactoryTest { + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar.addClasses(HelloClient2.class, EchoResource.class)) + .withConfigurationResource("factory-test-application.properties"); + + @Test + public void test() throws Exception { + RestClientBuilder restClientBuilder = RestClientBuilderFactory.getInstance().newBuilder(HelloClient2.class); + HelloClient2 restClient = restClientBuilder.build(HelloClient2.class); + + assertThat(restClientBuilder.getConfiguration().getProperties().get("io.quarkus.rest.client.read-timeout")) + .isEqualTo(3456L); + assertThat(restClient.echo("Hello")).contains("Hello"); + } +} diff --git a/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/resources/factory-test-application.properties b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/resources/factory-test-application.properties new file mode 100644 index 0000000000000..5bf6f265622e7 --- /dev/null +++ b/extensions/resteasy-reactive/rest-client-reactive/deployment/src/test/resources/factory-test-application.properties @@ 
-0,0 +1,2 @@ +quarkus.rest-client.hello2.url=http://localhost:8081/hello +quarkus.rest-client.read-timeout=3456 \ No newline at end of file diff --git a/extensions/resteasy-reactive/rest-client-reactive/runtime/src/main/java/io/quarkus/rest/client/reactive/runtime/ReactiveRestClientBuilderFactory.java b/extensions/resteasy-reactive/rest-client-reactive/runtime/src/main/java/io/quarkus/rest/client/reactive/runtime/ReactiveRestClientBuilderFactory.java new file mode 100644 index 0000000000000..d055bda239e18 --- /dev/null +++ b/extensions/resteasy-reactive/rest-client-reactive/runtime/src/main/java/io/quarkus/rest/client/reactive/runtime/ReactiveRestClientBuilderFactory.java @@ -0,0 +1,28 @@ +package io.quarkus.rest.client.reactive.runtime; + +import org.eclipse.microprofile.rest.client.RestClientBuilder; +import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; + +import io.quarkus.restclient.config.RestClientBuilderFactory; +import io.quarkus.restclient.config.RestClientsConfig; + +public class ReactiveRestClientBuilderFactory implements RestClientBuilderFactory { + + public RestClientBuilder newBuilder(Class proxyType, RestClientsConfig restClientsConfigRoot) { + RegisterRestClient annotation = proxyType.getAnnotation(RegisterRestClient.class); + String configKey = null; + String baseUri = null; + if (annotation != null) { + configKey = annotation.configKey(); + baseUri = annotation.baseUri(); + } + + RestClientBuilderImpl restClientBuilder = new RestClientBuilderImpl(); + RestClientCDIDelegateBuilder restClientBase = new RestClientCDIDelegateBuilder<>(proxyType, baseUri, configKey, + restClientsConfigRoot); + restClientBase.configureBuilder(restClientBuilder); + + return restClientBuilder; + } + +} diff --git a/extensions/resteasy-reactive/rest-client-reactive/runtime/src/main/java/io/quarkus/rest/client/reactive/runtime/RestClientCDIDelegateBuilder.java 
b/extensions/resteasy-reactive/rest-client-reactive/runtime/src/main/java/io/quarkus/rest/client/reactive/runtime/RestClientCDIDelegateBuilder.java index c5de36ca33ae1..83e647ae666df 100644 --- a/extensions/resteasy-reactive/rest-client-reactive/runtime/src/main/java/io/quarkus/rest/client/reactive/runtime/RestClientCDIDelegateBuilder.java +++ b/extensions/resteasy-reactive/rest-client-reactive/runtime/src/main/java/io/quarkus/rest/client/reactive/runtime/RestClientCDIDelegateBuilder.java @@ -60,10 +60,11 @@ private T build() { throw new IllegalStateException("Expected RestClientBuilder to be an instance of " + RestClientBuilderImpl.class.getName() + ", got " + builder.getClass().getName()); } - return build((RestClientBuilderImpl) builder); + configureBuilder((RestClientBuilderImpl) builder); + return builder.build(jaxrsInterface); } - T build(RestClientBuilderImpl builder) { + void configureBuilder(RestClientBuilderImpl builder) { configureBaseUrl(builder); configureTimeouts(builder); configureProviders(builder); @@ -73,12 +74,11 @@ T build(RestClientBuilderImpl builder) { configureProxy(builder); configureShared(builder); configureCustomProperties(builder); - return builder.build(jaxrsInterface); } private void configureCustomProperties(RestClientBuilder builder) { Optional encoder = configRoot.multipartPostEncoderMode; - if (encoder.isPresent()) { + if (encoder != null && encoder.isPresent()) { HttpPostRequestEncoder.EncoderMode mode = HttpPostRequestEncoder.EncoderMode .valueOf(encoder.get().toUpperCase(Locale.ROOT)); builder.property(QuarkusRestClientProperties.MULTIPART_ENCODER_MODE, mode); diff --git a/extensions/resteasy-reactive/rest-client-reactive/runtime/src/main/resources/META-INF/services/io.quarkus.restclient.config.RestClientBuilderFactory b/extensions/resteasy-reactive/rest-client-reactive/runtime/src/main/resources/META-INF/services/io.quarkus.restclient.config.RestClientBuilderFactory new file mode 100644 index 0000000000000..45fa8b003d6ee --- 
/dev/null +++ b/extensions/resteasy-reactive/rest-client-reactive/runtime/src/main/resources/META-INF/services/io.quarkus.restclient.config.RestClientBuilderFactory @@ -0,0 +1 @@ +io.quarkus.rest.client.reactive.runtime.ReactiveRestClientBuilderFactory \ No newline at end of file diff --git a/extensions/resteasy-reactive/rest-client-reactive/runtime/src/test/java/io/quarkus/rest/client/reactive/runtime/RestClientCDIDelegateBuilderTest.java b/extensions/resteasy-reactive/rest-client-reactive/runtime/src/test/java/io/quarkus/rest/client/reactive/runtime/RestClientCDIDelegateBuilderTest.java index 7088d7ba1bf04..60f80120fb6a7 100644 --- a/extensions/resteasy-reactive/rest-client-reactive/runtime/src/test/java/io/quarkus/rest/client/reactive/runtime/RestClientCDIDelegateBuilderTest.java +++ b/extensions/resteasy-reactive/rest-client-reactive/runtime/src/test/java/io/quarkus/rest/client/reactive/runtime/RestClientCDIDelegateBuilderTest.java @@ -82,7 +82,7 @@ public void testClientSpecificConfigs() { new RestClientCDIDelegateBuilder<>(TestClient.class, "http://localhost:8080", "test-client", - configRoot).build(restClientBuilderMock); + configRoot).configureBuilder(restClientBuilderMock); // then @@ -125,7 +125,7 @@ public void testGlobalConfigs() { new RestClientCDIDelegateBuilder<>(TestClient.class, "http://localhost:8080", "test-client", - configRoot).build(restClientBuilderMock); + configRoot).configureBuilder(restClientBuilderMock); // then diff --git a/extensions/smallrye-health/deployment/src/main/java/io/quarkus/smallrye/health/deployment/SmallRyeHealthConfig.java b/extensions/smallrye-health/deployment/src/main/java/io/quarkus/smallrye/health/deployment/SmallRyeHealthConfig.java index 8241dc9c1e0ce..08a1066afeea5 100644 --- a/extensions/smallrye-health/deployment/src/main/java/io/quarkus/smallrye/health/deployment/SmallRyeHealthConfig.java +++ b/extensions/smallrye-health/deployment/src/main/java/io/quarkus/smallrye/health/deployment/SmallRyeHealthConfig.java @@ 
-1,5 +1,7 @@ package io.quarkus.smallrye.health.deployment; +import java.util.OptionalInt; + import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigRoot; @@ -55,6 +57,12 @@ public class SmallRyeHealthConfig { @ConfigItem(defaultValue = "false") boolean contextPropagation; + /** + * The number of the maximum health groups that can be created. + */ + @ConfigItem + OptionalInt maxGroupRegistriesCount; + /** * SmallRye Health UI configuration */ diff --git a/extensions/smallrye-health/deployment/src/main/java/io/quarkus/smallrye/health/deployment/SmallRyeHealthProcessor.java b/extensions/smallrye-health/deployment/src/main/java/io/quarkus/smallrye/health/deployment/SmallRyeHealthProcessor.java index 539db291a350c..13b3b9bb9272b 100644 --- a/extensions/smallrye-health/deployment/src/main/java/io/quarkus/smallrye/health/deployment/SmallRyeHealthProcessor.java +++ b/extensions/smallrye-health/deployment/src/main/java/io/quarkus/smallrye/health/deployment/SmallRyeHealthProcessor.java @@ -242,14 +242,12 @@ public void defineHealthRoutes(BuildProducer routes, .build()); SmallRyeIndividualHealthGroupHandler handler = new SmallRyeIndividualHealthGroupHandler(); - for (String healthGroup : healthGroups) { - routes.produce(nonApplicationRootPathBuildItem.routeBuilder() - .nestedRoute(healthConfig.rootPath, healthConfig.groupPath + "/" + healthGroup) - .handler(handler) - .displayOnNotFoundPage() - .blockingRoute() - .build()); - } + routes.produce(nonApplicationRootPathBuildItem.routeBuilder() + .nestedRoute(healthConfig.rootPath, healthConfig.groupPath + "/*") + .handler(handler) + .displayOnNotFoundPage() + .blockingRoute() + .build()); // Register the wellness handler routes.produce(nonApplicationRootPathBuildItem.routeBuilder() @@ -275,6 +273,10 @@ public void processSmallRyeHealthConfigValues(SmallRyeHealthConfig healthConfig, if (healthConfig.contextPropagation) { 
config.produce(new RunTimeConfigurationDefaultBuildItem("io.smallrye.health.context.propagation", "true")); } + if (healthConfig.maxGroupRegistriesCount.isPresent()) { + config.produce(new RunTimeConfigurationDefaultBuildItem("io.smallrye.health.maxGroupRegistriesCount", + String.valueOf(healthConfig.maxGroupRegistriesCount.getAsInt()))); + } config.produce(new RunTimeConfigurationDefaultBuildItem("io.smallrye.health.delayChecksInitializations", "true")); } diff --git a/extensions/smallrye-health/deployment/src/test/java/io/quarkus/smallrye/health/test/MaxHealthGroupTest.java b/extensions/smallrye-health/deployment/src/test/java/io/quarkus/smallrye/health/test/MaxHealthGroupTest.java new file mode 100644 index 0000000000000..7074da671b85d --- /dev/null +++ b/extensions/smallrye-health/deployment/src/test/java/io/quarkus/smallrye/health/test/MaxHealthGroupTest.java @@ -0,0 +1,42 @@ +package io.quarkus.smallrye.health.test; + +import static org.hamcrest.Matchers.is; + +import org.hamcrest.Matchers; +import org.jboss.shrinkwrap.api.asset.EmptyAsset; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; +import io.restassured.RestAssured; +import io.restassured.parsing.Parser; + +public class MaxHealthGroupTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addClasses(BasicHealthCheck.class) + .addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml")) + .overrideConfigKey("quarkus.smallrye-health.max-group-registries-count", "3"); + + @Test + public void testMaxGroupRegistriesCreations() { + try { + RestAssured.defaultParser = Parser.JSON; + + for (int i = 0; i < 3; i++) { + RestAssured.get("/q/health/group/" + i).then() + .statusCode(200) + .body("status", is("UP"), + "checks.size()", is(0)); + } + RestAssured.when().get("/q/health/group/not-allowed").then() + .statusCode(500) + .body("details", 
Matchers.endsWith("3")); + } finally { + RestAssured.reset(); + } + } + +} diff --git a/extensions/smallrye-stork/runtime/src/main/java/io/quarkus/stork/StorkConfigUtil.java b/extensions/smallrye-stork/runtime/src/main/java/io/quarkus/stork/StorkConfigUtil.java index db9d9f68b29a8..a87bb3f39b0f5 100644 --- a/extensions/smallrye-stork/runtime/src/main/java/io/quarkus/stork/StorkConfigUtil.java +++ b/extensions/smallrye-stork/runtime/src/main/java/io/quarkus/stork/StorkConfigUtil.java @@ -16,9 +16,9 @@ public static List toStorkServiceConfig(StorkConfiguration storkC for (String serviceName : servicesConfigs) { builder.setServiceName(serviceName); ServiceConfiguration serviceConfiguration = storkConfiguration.serviceConfiguration.get(serviceName); - SimpleServiceConfig.SimpleServiceDiscoveryConfig storkServiceDiscoveryConfig = new SimpleServiceConfig.SimpleServiceDiscoveryConfig( + SimpleServiceConfig.SimpleConfigWithType storkServiceDiscoveryConfig = new SimpleServiceConfig.SimpleConfigWithType( serviceConfiguration.serviceDiscovery.type, serviceConfiguration.serviceDiscovery.params); - builder.setServiceDiscovery(storkServiceDiscoveryConfig); + builder = builder.setServiceDiscovery(storkServiceDiscoveryConfig); SimpleServiceConfig.SimpleLoadBalancerConfig loadBalancerConfig = new SimpleServiceConfig.SimpleLoadBalancerConfig( serviceConfiguration.loadBalancer.type, serviceConfiguration.loadBalancer.parameters); builder.setLoadBalancer(loadBalancerConfig); diff --git a/extensions/vertx-http/deployment/pom.xml b/extensions/vertx-http/deployment/pom.xml index f73b67dd3c39e..a5154ddd7b939 100644 --- a/extensions/vertx-http/deployment/pom.xml +++ b/extensions/vertx-http/deployment/pom.xml @@ -52,6 +52,16 @@ bootstrap provided + + org.webjars + bootstrap-multiselect + provided + + + org.webjars.npm + bootstrap-icons + provided + org.webjars font-awesome @@ -196,6 +206,55 @@ + + + org.webjars + bootstrap-multiselect + ${webjar.bootstrap-multiselect.version} + jar + true + 
${project.build.directory}/classes/dev-static/js/ + **/bootstrap-multiselect.js + + + + + + org.webjars + bootstrap-multiselect + ${webjar.bootstrap-multiselect.version} + jar + true + ${project.build.directory}/classes/dev-static/css/ + **/bootstrap-multiselect.css + + + + + + org.webjars.npm + bootstrap-icons + ${webjar.bootstrap-icons.version} + jar + true + ${project.build.directory}/classes/dev-static/css/ + **/font/bootstrap-icons.css + + + + + + org.webjars.npm + bootstrap-icons + ${webjar.bootstrap-icons.version} + jar + true + ${project.build.directory}/classes/dev-static/css/fonts/ + **/font/fonts/ + + + + org.webjars diff --git a/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/devmode/console/DevConsoleProcessor.java b/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/devmode/console/DevConsoleProcessor.java index 0943aeda594d1..6c34e448daabe 100644 --- a/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/devmode/console/DevConsoleProcessor.java +++ b/extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/devmode/console/DevConsoleProcessor.java @@ -64,6 +64,7 @@ import io.quarkus.devconsole.spi.DevConsoleRouteBuildItem; import io.quarkus.devconsole.spi.DevConsoleRuntimeTemplateInfoBuildItem; import io.quarkus.devconsole.spi.DevConsoleTemplateInfoBuildItem; +import io.quarkus.devconsole.spi.DevConsoleWebjarBuildItem; import io.quarkus.maven.dependency.ArtifactKey; import io.quarkus.maven.dependency.GACT; import io.quarkus.netty.runtime.virtual.VirtualChannel; @@ -88,6 +89,7 @@ import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.TemplateHtmlBuilder; import io.quarkus.utilities.OS; +import io.quarkus.vertx.http.deployment.BodyHandlerBuildItem; import io.quarkus.vertx.http.deployment.HttpRootPathBuildItem; import io.quarkus.vertx.http.deployment.NonApplicationRootPathBuildItem; import 
io.quarkus.vertx.http.deployment.RouteBuildItem; @@ -387,6 +389,46 @@ public WebJarBuildItem setupWebJar( .build(); } + @BuildStep(onlyIf = IsDevelopment.class) + public void setupDevConsoleWebjar( + List devConsoleWebjarBuildItems, + BuildProducer webJarBuildItemBuildProducer, + LaunchModeBuildItem launchModeBuildItem) { + if (launchModeBuildItem.getDevModeType().orElse(null) != DevModeType.LOCAL) { + return; + } + for (DevConsoleWebjarBuildItem devConsoleWebjar : devConsoleWebjarBuildItems) { + webJarBuildItemBuildProducer.produce(WebJarBuildItem.builder() + .artifactKey(devConsoleWebjar.getArtifactKey()) + .root(devConsoleWebjar.getRoot()) + .useDefaultQuarkusBranding(devConsoleWebjar.getUseDefaultQuarkusBranding()) + .onlyCopyNonArtifactFiles(devConsoleWebjar.getOnlyCopyNonArtifactFiles()) + .build()); + } + } + + @Record(ExecutionTime.RUNTIME_INIT) + @BuildStep(onlyIf = IsDevelopment.class) + public void setupDevConsoleRoutes( + List devConsoleWebjarBuildItems, + DevConsoleRecorder recorder, + NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, + ShutdownContextBuildItem shutdownContext, + BuildProducer routeBuildItemBuildProducer, + WebJarResultsBuildItem webJarResultsBuildItem) { + + for (DevConsoleWebjarBuildItem webjarBuildItem : devConsoleWebjarBuildItems) { + WebJarResultsBuildItem.WebJarResult result = webJarResultsBuildItem.byArtifactKey(webjarBuildItem.getArtifactKey()); + if (result == null) { + continue; + } + routeBuildItemBuildProducer.produce(nonApplicationRootPathBuildItem.routeBuilder() + .route("dev/" + webjarBuildItem.getRouteRoot() + "/*") + .handler(recorder.fileSystemStaticHandler(result.getWebRootConfigurations(), shutdownContext)) + .build()); + } + } + @BuildStep(onlyIf = { IsDevelopment.class }) public DevConsoleTemplateInfoBuildItem config(List serviceDescriptions) { return new DevConsoleTemplateInfoBuildItem("devServices", serviceDescriptions); @@ -404,7 +446,8 @@ public void setupDevConsoleRoutes( 
ShutdownContextBuildItem shutdownContext, BuildProducer routeBuildItemBuildProducer, WebJarResultsBuildItem webJarResultsBuildItem, - CurateOutcomeBuildItem curateOutcomeBuildItem) { + CurateOutcomeBuildItem curateOutcomeBuildItem, + BodyHandlerBuildItem bodyHandlerBuildItem) { WebJarResultsBuildItem.WebJarResult result = webJarResultsBuildItem.byArtifactKey(DEVCONSOLE_WEBJAR_ARTIFACT_KEY); @@ -432,7 +475,8 @@ public void setupDevConsoleRoutes( NonApplicationRootPathBuildItem.Builder builder = nonApplicationRootPathBuildItem.routeBuilder() .routeFunction( "dev/" + groupAndArtifact.getKey() + "." + groupAndArtifact.getValue() + "/" + i.getPath(), - new RuntimeDevConsoleRoute(i.getMethod())); + new RuntimeDevConsoleRoute(i.getMethod(), + i.isBodyHandlerRequired() ? bodyHandlerBuildItem.getHandler() : null)); if (i.isBlockingHandler()) { builder.blockingRoute(); } diff --git a/extensions/vertx-http/dev-console-spi/src/main/java/io/quarkus/devconsole/spi/DevConsoleWebjarBuildItem.java b/extensions/vertx-http/dev-console-spi/src/main/java/io/quarkus/devconsole/spi/DevConsoleWebjarBuildItem.java new file mode 100644 index 0000000000000..01d16ceea3e3c --- /dev/null +++ b/extensions/vertx-http/dev-console-spi/src/main/java/io/quarkus/devconsole/spi/DevConsoleWebjarBuildItem.java @@ -0,0 +1,105 @@ +package io.quarkus.devconsole.spi; + +import io.quarkus.builder.item.MultiBuildItem; +import io.quarkus.maven.dependency.GACT; + +public final class DevConsoleWebjarBuildItem extends MultiBuildItem { + /** + * ArtifactKey pointing to the web jar. Has to be one of the applications dependencies. + */ + private final GACT artifactKey; + + /** + * Root inside the webJar starting from which resources are unpacked. + */ + private final String root; + + /** + * Only copy resources of the webjar which are either user overridden, or contain variables. 
+ */ + private final boolean onlyCopyNonArtifactFiles; + + /** + * Defines whether Quarkus can override resources of the webjar with Quarkus internal files. + */ + private final boolean useDefaultQuarkusBranding; + + /** + * The root of the route to expose resources of the webjar + */ + private final String routeRoot; + + private DevConsoleWebjarBuildItem(Builder builder) { + this.artifactKey = builder.artifactKey; + this.root = builder.root; + this.useDefaultQuarkusBranding = builder.useDefaultQuarkusBranding; + this.onlyCopyNonArtifactFiles = builder.onlyCopyNonArtifactFiles; + this.routeRoot = builder.routeRoot; + } + + public GACT getArtifactKey() { + return artifactKey; + } + + public String getRoot() { + return root; + } + + public boolean getUseDefaultQuarkusBranding() { + return useDefaultQuarkusBranding; + } + + public boolean getOnlyCopyNonArtifactFiles() { + return onlyCopyNonArtifactFiles; + } + + public String getRouteRoot() { + return routeRoot; + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + private GACT artifactKey; + private String root; + private boolean useDefaultQuarkusBranding = true; + private boolean onlyCopyNonArtifactFiles = true; + private String routeRoot; + + public Builder artifactKey(GACT artifactKey) { + this.artifactKey = artifactKey; + return this; + } + + public Builder root(String root) { + this.root = root; + + if (this.root != null && this.root.startsWith("/")) { + this.root = this.root.substring(1); + } + + return this; + } + + public Builder routeRoot(String route) { + this.routeRoot = route; + return this; + } + + public Builder useDefaultQuarkusBranding(boolean useDefaultQuarkusBranding) { + this.useDefaultQuarkusBranding = useDefaultQuarkusBranding; + return this; + } + + public Builder onlyCopyNonArtifactFiles(boolean onlyCopyNonArtifactFiles) { + this.onlyCopyNonArtifactFiles = onlyCopyNonArtifactFiles; + return this; + } + + public DevConsoleWebjarBuildItem 
build() { + return new DevConsoleWebjarBuildItem(this); + } + } +} diff --git a/extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/devmode/RuntimeDevConsoleRoute.java b/extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/devmode/RuntimeDevConsoleRoute.java index ff5a05a705691..d3adff64c5e0a 100644 --- a/extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/devmode/RuntimeDevConsoleRoute.java +++ b/extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/devmode/RuntimeDevConsoleRoute.java @@ -2,24 +2,37 @@ import java.util.function.Consumer; +import io.vertx.core.Handler; import io.vertx.core.http.HttpMethod; import io.vertx.ext.web.Route; +import io.vertx.ext.web.RoutingContext; public class RuntimeDevConsoleRoute implements Consumer { private String method; + private Handler bodyHandler; + public RuntimeDevConsoleRoute() { } - public RuntimeDevConsoleRoute(String method) { + public RuntimeDevConsoleRoute(String method, Handler hasBodyHandler) { this.method = method; + this.bodyHandler = hasBodyHandler; } public String getMethod() { return method; } + public Handler getBodyHandler() { + return bodyHandler; + } + + public void setBodyHandler(Handler bodyHandler) { + this.bodyHandler = bodyHandler; + } + public RuntimeDevConsoleRoute setMethod(String method) { this.method = method; return this; @@ -29,5 +42,8 @@ public RuntimeDevConsoleRoute setMethod(String method) { public void accept(Route route) { route.method(HttpMethod.valueOf(method)) .order(-100); + if (bodyHandler != null) { + route.handler(bodyHandler); + } } } diff --git a/independent-projects/resteasy-reactive/client/runtime/src/main/java/org/jboss/resteasy/reactive/client/impl/StorkClientRequestFilter.java b/independent-projects/resteasy-reactive/client/runtime/src/main/java/org/jboss/resteasy/reactive/client/impl/StorkClientRequestFilter.java index 43c8c218eb4cb..eeb59d0e0ab5d 100644 --- 
a/independent-projects/resteasy-reactive/client/runtime/src/main/java/org/jboss/resteasy/reactive/client/impl/StorkClientRequestFilter.java +++ b/independent-projects/resteasy-reactive/client/runtime/src/main/java/org/jboss/resteasy/reactive/client/impl/StorkClientRequestFilter.java @@ -34,7 +34,8 @@ public void filter(ResteasyReactiveClientRequestContext requestContext) { try { serviceInstance = Stork.getInstance() .getService(serviceName) - .selectInstanceAndRecordStart(measureTime); + .selectInstanceAndRecordStart(measureTime) + .log(); } catch (Throwable e) { log.error("Error selecting service instance for serviceName: " + serviceName, e); requestContext.resume(e); @@ -46,8 +47,18 @@ public void filter(ResteasyReactiveClientRequestContext requestContext) { boolean isHttps = instance.isSecure() || "storks".equals(uri.getScheme()); String scheme = isHttps ? "https" : "http"; try { + // In the case the service instance does not set the host and/or port + String host = instance.getHost() == null ? 
"localhost" : instance.getHost(); + int port = instance.getPort(); + if (instance.getPort() == 0) { + if (isHttps) { + port = 433; + } else { + port = 80; + } + } URI newUri = new URI(scheme, - uri.getUserInfo(), instance.getHost(), instance.getPort(), + uri.getUserInfo(), host, port, uri.getPath(), uri.getQuery(), uri.getFragment()); requestContext.setUri(newUri); if (measureTime && instance.gatherStatistics()) { diff --git a/independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/util/MediaTypeHelper.java b/independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/util/MediaTypeHelper.java index f7e5e84006736..6167efa7495ef 100644 --- a/independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/util/MediaTypeHelper.java +++ b/independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/util/MediaTypeHelper.java @@ -33,7 +33,9 @@ private static float getQTypeWithParamInfo(MediaType type, String parameterName) return rtn; } } catch (NumberFormatException e) { - throw new RuntimeException(String.format("Media type %s value must be a float: %s", parameterName, type), e); + throw new WebApplicationException( + String.format("Media type %s value must be a float: %s", parameterName, type), + Response.Status.BAD_REQUEST); } } return 2.0f; diff --git a/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/startup/RuntimeResourceDeployment.java b/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/startup/RuntimeResourceDeployment.java index 1306ad35f371a..45f83539204ce 100644 --- a/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/startup/RuntimeResourceDeployment.java +++ 
b/independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/startup/RuntimeResourceDeployment.java @@ -8,6 +8,7 @@ import java.lang.reflect.Method; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; +import java.lang.reflect.WildcardType; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; @@ -503,7 +504,27 @@ private static void smartInitParameterConverter(int i, ParameterConverter quarku if (genericParameterTypes[i] instanceof ParameterizedType) { Type[] genericArguments = ((ParameterizedType) genericParameterTypes[i]).getActualTypeArguments(); if (genericArguments.length == 1) { - quarkusConverter.init(paramConverterProviders, loadClass(genericArguments[0].getTypeName()), + String genericTypeClassName = null; + Type genericType = genericArguments[0]; + if (genericType instanceof Class) { + genericTypeClassName = ((Class) genericType).getName(); + } else if (genericType instanceof WildcardType) { + WildcardType genericTypeWildcardType = (WildcardType) genericType; + Type[] upperBounds = genericTypeWildcardType.getUpperBounds(); + Type[] lowerBounds = genericTypeWildcardType.getLowerBounds(); + if ((lowerBounds.length == 0) && (upperBounds.length == 1)) { + Type genericTypeUpperBoundType = upperBounds[0]; + if (genericTypeUpperBoundType instanceof Class) { + genericTypeClassName = ((Class) genericTypeUpperBoundType).getName(); + } + } + } + //TODO: are there any other cases we can support? 
+ if (genericTypeClassName == null) { + throw new IllegalArgumentException( + "Unable to support parameter converter with type: '" + genericType.getTypeName() + "'"); + } + quarkusConverter.init(paramConverterProviders, loadClass(genericTypeClassName), genericArguments[0], parameterAnnotations[i]); return; diff --git a/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/resource/basic/MatchedResourceTest.java b/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/resource/basic/MatchedResourceTest.java index 63cd7fad61906..838872a75541e 100644 --- a/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/resource/basic/MatchedResourceTest.java +++ b/independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/resource/basic/MatchedResourceTest.java @@ -91,6 +91,17 @@ public void testMatch() throws Exception { response.close(); } + @Test + @DisplayName("Test Invalid Q Value") + public void testInvalidQValue() throws Exception { + WebTarget base = client.target(generateURL("/match")); + Response response = base.request() + .header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=malformed") + .get(); + Assertions.assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), response.getStatus()); + response.close(); + } + public void generalPostTest(String uri, String value) { WebTarget base = client.target(uri); Response response = base.request().get(); diff --git a/integration-tests/infinispan-client/src/main/java/io/quarkus/it/infinispan/client/CacheSetup.java b/integration-tests/infinispan-client/src/main/java/io/quarkus/it/infinispan/client/CacheSetup.java index f04c346a7fea5..b12c898043842 100644 --- a/integration-tests/infinispan-client/src/main/java/io/quarkus/it/infinispan/client/CacheSetup.java +++ 
b/integration-tests/infinispan-client/src/main/java/io/quarkus/it/infinispan/client/CacheSetup.java @@ -31,6 +31,7 @@ import org.infinispan.query.dsl.Query; import org.infinispan.query.dsl.QueryFactory; +import io.quarkus.infinispan.client.Remote; import io.quarkus.runtime.StartupEvent; @ApplicationScoped @@ -40,10 +41,15 @@ public class CacheSetup { public static final String DEFAULT_CACHE = "default"; public static final String MAGAZINE_CACHE = "magazine"; + public static final String AUTHORS_CACHE = "authors"; @Inject RemoteCacheManager cacheManager; + @Inject + @Remote(AUTHORS_CACHE) + RemoteCache authors; + private final Map matches = new ConcurrentHashMap<>(); private CountDownLatch waitUntilStarted = new CountDownLatch(1); @@ -51,6 +57,7 @@ public class CacheSetup { void onStart(@Observes StartupEvent ev) { RemoteCache defaultCache = cacheManager.getCache(DEFAULT_CACHE); RemoteCache magazineCache = cacheManager.getCache(MAGAZINE_CACHE); + defaultCache.addClientListener(new EventPrintListener()); ContinuousQuery continuousQuery = Search.getContinuousQuery(defaultCache); @@ -81,8 +88,10 @@ public void resultUpdated(String key, Book value) { log.info("Added continuous query listener"); + Author gMartin = new Author("George", "Martin"); + defaultCache.put("book1", new Book("Game of Thrones", "Lots of people perish", 2010, - Collections.singleton(new Author("George", "Martin")), Type.FANTASY, new BigDecimal("23.99"))); + Collections.singleton(gMartin), Type.FANTASY, new BigDecimal("23.99"))); defaultCache.put("book2", new Book("Game of Thrones Path 2", "They win?", 2023, Collections.singleton(new Author("Son", "Martin")), Type.FANTASY, new BigDecimal("54.99"))); @@ -94,6 +103,8 @@ public void resultUpdated(String key, Book value) { magazineCache.put("popular-time", new Magazine("TIME", YearMonth.of(1997, 4), Arrays.asList("Yep, I'm gay", "Backlash against HMOS", "False Hope on Breast Cancer?"))); + authors.put("aut-1", gMartin); + waitUntilStarted.countDown(); } 
diff --git a/integration-tests/infinispan-client/src/main/java/io/quarkus/it/infinispan/client/TestServlet.java b/integration-tests/infinispan-client/src/main/java/io/quarkus/it/infinispan/client/TestServlet.java index a1cec8db1b943..7352e9f637d16 100644 --- a/integration-tests/infinispan-client/src/main/java/io/quarkus/it/infinispan/client/TestServlet.java +++ b/integration-tests/infinispan-client/src/main/java/io/quarkus/it/infinispan/client/TestServlet.java @@ -46,6 +46,10 @@ public class TestServlet { @Remote(CacheSetup.MAGAZINE_CACHE) RemoteCache magazineCache; + @Inject + @Remote(CacheSetup.AUTHORS_CACHE) + RemoteCache authorsCache; + @Inject CounterManager counterManager; @@ -234,4 +238,13 @@ public String magazineQuery(@PathParam("id") String name) { .map(m -> m.getName() + ":" + m.getPublicationYearMonth()) .collect(Collectors.joining(",", "[", "]")); } + + @Path("create-cache-default-config/authors") + @GET + public String magazineQuery() { + cacheSetup.ensureStarted(); + return authorsCache.values().stream() + .map(a -> a.getName()) + .collect(Collectors.joining(",", "[", "]")); + } } diff --git a/integration-tests/infinispan-client/src/test/java/io/quarkus/it/infinispan/client/InfinispanClientFunctionalityTest.java b/integration-tests/infinispan-client/src/test/java/io/quarkus/it/infinispan/client/InfinispanClientFunctionalityTest.java index c490f8d99ba63..9de116a8d3964 100644 --- a/integration-tests/infinispan-client/src/test/java/io/quarkus/it/infinispan/client/InfinispanClientFunctionalityTest.java +++ b/integration-tests/infinispan-client/src/test/java/io/quarkus/it/infinispan/client/InfinispanClientFunctionalityTest.java @@ -51,4 +51,9 @@ public void testNearCacheInvalidation() { public void testQueryWithCustomMarshaller() { RestAssured.when().get("/test/magazinequery/IM").then().body(is("[TIME:1923-03,TIME:1997-04]")); } + + @Test + public void testAuthor() { + 
RestAssured.when().get("/test/create-cache-default-config/authors").then().body(is("[George]")); + } } diff --git a/integration-tests/logging-min-level-set/src/main/java/io/quarkus/it/logging/minlevel/set/filter/LoggingFilter.java b/integration-tests/logging-min-level-set/src/main/java/io/quarkus/it/logging/minlevel/set/filter/LoggingFilter.java new file mode 100644 index 0000000000000..4410b4cdd7ee6 --- /dev/null +++ b/integration-tests/logging-min-level-set/src/main/java/io/quarkus/it/logging/minlevel/set/filter/LoggingFilter.java @@ -0,0 +1,31 @@ +package io.quarkus.it.logging.minlevel.set.filter; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; + +import org.jboss.logging.Logger; + +import io.quarkus.it.logging.minlevel.set.LoggingWitness; + +@Path("/log/filter") +public class LoggingFilter { + + static final Logger LOG = Logger.getLogger(LoggingFilter.class); + + @GET + @Path("/filtered") + @Produces(MediaType.TEXT_PLAIN) + public boolean filtered() { + return LoggingWitness.loggedWarn("TEST warn message", LOG); + } + + @GET + @Path("/not-filtered") + @Produces(MediaType.TEXT_PLAIN) + public boolean notFiltered() { + return LoggingWitness.loggedWarn("warn message", LOG); + } + +} diff --git a/integration-tests/logging-min-level-set/src/main/java/io/quarkus/it/logging/minlevel/set/filter/TestFilter.java b/integration-tests/logging-min-level-set/src/main/java/io/quarkus/it/logging/minlevel/set/filter/TestFilter.java new file mode 100644 index 0000000000000..30163df431240 --- /dev/null +++ b/integration-tests/logging-min-level-set/src/main/java/io/quarkus/it/logging/minlevel/set/filter/TestFilter.java @@ -0,0 +1,15 @@ +package io.quarkus.it.logging.minlevel.set.filter; + +import java.util.logging.Filter; +import java.util.logging.LogRecord; + +import io.quarkus.logging.LoggingFilter; + +@LoggingFilter(name = "my-filter") +public final class TestFilter implements Filter { + + @Override + public 
boolean isLoggable(LogRecord record) { + return !record.getMessage().contains("TEST"); + } +} diff --git a/integration-tests/logging-min-level-set/src/main/resources/application.properties b/integration-tests/logging-min-level-set/src/main/resources/application.properties index 2e1c47781c2c4..e33158f666c73 100644 --- a/integration-tests/logging-min-level-set/src/main/resources/application.properties +++ b/integration-tests/logging-min-level-set/src/main/resources/application.properties @@ -3,3 +3,4 @@ quarkus.log.category."io.quarkus.it.logging.minlevel.set.above".min-level=INFO quarkus.log.category."io.quarkus.it.logging.minlevel.set.below".min-level=TRACE quarkus.log.category."io.quarkus.it.logging.minlevel.set.below.child".min-level=inherit quarkus.log.category."io.quarkus.it.logging.minlevel.set.promote".min-level=ERROR +quarkus.log.console.filter=my-filter diff --git a/integration-tests/logging-min-level-set/src/test/java/io/quarkus/it/logging/minlevel/set/LoggingFilterTest.java b/integration-tests/logging-min-level-set/src/test/java/io/quarkus/it/logging/minlevel/set/LoggingFilterTest.java new file mode 100644 index 0000000000000..2556243f76013 --- /dev/null +++ b/integration-tests/logging-min-level-set/src/test/java/io/quarkus/it/logging/minlevel/set/LoggingFilterTest.java @@ -0,0 +1,33 @@ +package io.quarkus.it.logging.minlevel.set; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.CoreMatchers.is; + +import org.junit.jupiter.api.Test; + +import io.quarkus.test.common.QuarkusTestResource; +import io.quarkus.test.junit.QuarkusTest; + +@QuarkusTest +@QuarkusTestResource(SetRuntimeLogLevels.class) +public class LoggingFilterTest { + + @Test + public void testFiltered() { + given() + .when().get("/log/filter/filtered") + .then() + .statusCode(200) + .body(is("false")); + } + + @Test + public void testNotFiltered() { + given() + .when().get("/log/filter/not-filtered") + .then() + .statusCode(200) + .body(is("true")); + } + +} diff 
--git a/jakarta/rewrite.yml b/jakarta/rewrite.yml index 075aafd0d1432..8b37a399dfdc6 100644 --- a/jakarta/rewrite.yml +++ b/jakarta/rewrite.yml @@ -339,7 +339,7 @@ recipeList: addIfMissing: true - org.openrewrite.maven.ChangePropertyValue: key: jboss-logmanager.version - newValue: 1.1.0 + newValue: 1.1.1 --- type: specs.openrewrite.org/v1beta/recipe name: io.quarkus.jakarta-json-switch @@ -547,7 +547,7 @@ recipeList: newValue: 2.0.0.RC8 - org.openrewrite.maven.ChangePropertyValue: key: smallrye-health.version - newValue: 4.0.0-RC2 + newValue: 4.0.0 - org.openrewrite.maven.ChangePropertyValue: key: microprofile-jwt.version newValue: 2.0