diff --git a/.github/ci-prerequisites.sh b/.github/ci-prerequisites.sh index 94a0a47e575523..5c8773b20939d1 100755 --- a/.github/ci-prerequisites.sh +++ b/.github/ci-prerequisites.sh @@ -41,8 +41,19 @@ time sudo rm -rf /usr/share/swift || true time sudo rm -rf /usr/local/lib/android || true # Remove Haskell time sudo rm -rf /opt/ghc || true +time sudo rm -rf /usr/local/.ghcup || true # Remove pipx time sudo rm -rf /opt/pipx || true +# Remove Rust +time sudo rm -rf /usr/share/rust || true +# Remove Go +time sudo rm -rf /usr/local/go || true +# Remove miniconda +time sudo rm -rf /usr/share/miniconda || true +# Remove powershell +time sudo rm -rf /usr/local/share/powershell || true +# Remove Google Cloud SDK +time sudo rm -rf /usr/lib/google-cloud-sdk || true # Remove infrastructure things that are unused and take a lot of space time sudo rm -rf /opt/hostedtoolcache/CodeQL || true diff --git a/.github/native-tests.json b/.github/native-tests.json index 11d2eca78c18ac..828044748e0c24 100644 --- a/.github/native-tests.json +++ b/.github/native-tests.json @@ -128,8 +128,8 @@ }, { "category": "gRPC", - "timeout": 70, - "test-modules": "grpc-health, grpc-interceptors, grpc-mutual-auth, grpc-plain-text-gzip, grpc-plain-text-mutiny, grpc-proto-v2, grpc-streaming, grpc-tls", + "timeout": 75, + "test-modules": "grpc-health, grpc-interceptors, grpc-mutual-auth, grpc-plain-text-gzip, grpc-plain-text-mutiny, grpc-proto-v2, grpc-streaming, grpc-tls, grpc-tls-p12", "os-name": "ubuntu-latest" }, { diff --git a/.github/quarkus-github-bot.yml b/.github/quarkus-github-bot.yml index ae7233040250eb..f993137b1ecb46 100644 --- a/.github/quarkus-github-bot.yml +++ b/.github/quarkus-github-bot.yml @@ -383,13 +383,13 @@ triage: - id: scheduler labels: [area/scheduler] title: "schedule(r)?" 
- notify: [mkouba] + notify: [mkouba, manovotn] directories: - extensions/scheduler/ - id: quartz labels: [area/scheduler] title: "quartz" - notify: [mkouba, machi1990] + notify: [mkouba, machi1990, manovotn] directories: - extensions/quartz/ - integration-tests/quartz/ diff --git a/.github/workflows/ci-actions-incremental.yml b/.github/workflows/ci-actions-incremental.yml index 11c9a43357c298..a34518ee35d893 100644 --- a/.github/workflows/ci-actions-incremental.yml +++ b/.github/workflows/ci-actions-incremental.yml @@ -376,6 +376,9 @@ jobs: - name: Clean Gradle temp directory if: always() run: devtools/gradle/gradlew --stop && rm -rf devtools/gradle/gradle-extension-plugin/build/tmp + - name: Analyze disk space + if: always() && !startsWith(matrix.java.os-name, 'windows') && !startsWith(matrix.java.os-name, 'macos') + run: .github/ci-disk-usage.sh - name: Prepare failure archive (if maven failed) if: failure() run: find . -name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - @@ -1072,6 +1075,11 @@ jobs: build-scan-capture-strategy: ON_DEMAND job-name: "Native Tests - ${{matrix.category}}" wrapper-init: true + - name: Cache Quarkus metadata + uses: actions/cache@v4 + with: + path: '**/.quarkus/quarkus-prod-config-dump' + key: ${{ runner.os }}-quarkus-metadata - name: Build env: TEST_MODULES: ${{matrix.test-modules}} diff --git a/.github/workflows/podman-build.yml b/.github/workflows/podman-build.yml index b62a09c77344b3..2f4811a99b2ae3 100644 --- a/.github/workflows/podman-build.yml +++ b/.github/workflows/podman-build.yml @@ -74,6 +74,7 @@ jobs: | sudo tee /etc/apt/sources.list.d/devel:kubic:libcontainers:unstable.list > /dev/null sudo apt-get update -qq sudo apt-get -qq -y install podman + sudo bash -c "echo -e '[engine]\nservice_timeout=0' >> /etc/containers/containers.conf" # Runs a single command using the runners shell - name: Check podman run: docker version @@ -89,7 +90,7 @@ jobs: key: q2maven-${{ steps.get-date.outputs.date }} - name: 
Initial build run: | - ./mvnw -T1C $COMMON_MAVEN_ARGS -DskipTests -DskipITs -Dinvoker.skip -Dno-format -Dtcks -Prelocations clean install + ./mvnw -T1C $COMMON_MAVEN_ARGS -DskipTests -DskipITs -DskipDocs -Dinvoker.skip -Dskip.gradle.tests -Djbang.skip -Dtruststore.skip -Dno-format -Dtcks -Prelocations clean install - name: Verify extension dependencies shell: bash run: ./update-extension-dependencies.sh $COMMON_MAVEN_ARGS @@ -149,7 +150,7 @@ jobs: - name: Build shell: bash # Despite the pre-calculated run_jvm flag, GIB has to be re-run here to figure out the exact submodules to build. - run: ./mvnw $COMMON_MAVEN_ARGS install -Dsurefire.timeout=1200 -pl !integration-tests/gradle -pl !integration-tests/maven -pl !integration-tests/devtools -pl !docs $JVM_TEST_MAVEN_ARGS ${{ needs.build-jdk11.outputs.gib_args }} + run: ./mvnw $COMMON_MAVEN_ARGS install -Dsurefire.timeout=1200 -pl !integration-tests/gradle -pl !integration-tests/maven -pl !integration-tests/devmode -pl !integration-tests/devtools -Dno-test-kubernetes -pl !docs $JVM_TEST_MAVEN_ARGS ${{ steps.get-gib-args.outputs.gib_args }} - name: Delete Local Artifacts From Cache shell: bash run: rm -r ~/.m2/repository/io/quarkus diff --git a/.gitpod/Dockerfile b/.gitpod/Dockerfile index ed3ace5b124b45..47b829d212849b 100644 --- a/.gitpod/Dockerfile +++ b/.gitpod/Dockerfile @@ -1,8 +1,8 @@ FROM gitpod/workspace-java-17 RUN bash -c ". 
/home/gitpod/.sdkman/bin/sdkman-init.sh && \ - sdk install java 17.0.9-tem && \ - sdk use java 17.0.9-tem && \ + sdk install java 17.0.10-tem && \ + sdk use java 17.0.10-tem && \ yes | sdk install quarkus && \ rm -rf $HOME/.sdkman/archives/* && \ rm -rf $HOME/.sdkman/tmp/* " diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml index 1cb948d1490672..08c504fc6bc7f1 100644 --- a/.mvn/extensions.xml +++ b/.mvn/extensions.xml @@ -2,11 +2,21 @@ com.gradle gradle-enterprise-maven-extension - 1.20 + 1.20.1 com.gradle common-custom-user-data-maven-extension 1.12.5 + + com.gradle + quarkus-build-caching-extension + 0.10 + + + io.quarkus.develocity + quarkus-project-develocity-extension + 1.0.6 + diff --git a/.mvn/gradle-enterprise-custom-user-data.groovy b/.mvn/gradle-enterprise-custom-user-data.groovy deleted file mode 100644 index 0f9b416591ea7f..00000000000000 --- a/.mvn/gradle-enterprise-custom-user-data.groovy +++ /dev/null @@ -1,129 +0,0 @@ - -// Configure build scan publication -boolean publish = true -if(session?.getRequest()?.getBaseDirectory() != null) { - def testBuildPaths = [ - File.separator + 'target' + File.separator + 'codestart-test' + File.separator, - File.separator + 'target' + File.separator + 'it' + File.separator, - File.separator + 'target' + File.separator + 'test-classes' + File.separator, - File.separator + 'target' + File.separator + 'test-project' + File.separator - ] - publish = testBuildPaths.every {testBuildPath -> !session.getRequest().getBaseDirectory().contains(testBuildPath) } - if(!publish) { - // do not publish a build scan for test builds - log.debug("Disabling build scan publication for " + session.getRequest().getBaseDirectory()) - - // change storage location on CI to avoid Develocity scan dumps with disabled publication to be captured for republication - if (System.env.GITHUB_ACTIONS) { - try { - def storageLocationTmpDir = java.nio.file.Files.createTempDirectory(java.nio.file.Paths.get(System.env.RUNNER_TEMP), 
"buildScanTmp").toAbsolutePath() - log.debug('Update storage location to ' + storageLocationTmpDir) - gradleEnterprise.setStorageDirectory(storageLocationTmpDir) - } catch (IOException e) { - log.error('Temporary storage location directory cannot be created, the Build Scan will be published', e) - } - } - } -} -buildScan.publishAlwaysIf(publish) -buildScan.publishIfAuthenticated() - -// Add mvn command line -def mvnCommand = '' -if (System.env.MAVEN_CMD_LINE_ARGS) { - mvnCommand = "mvn ${System.env.MAVEN_CMD_LINE_ARGS}".toString() - buildScan.value('mvn command line', mvnCommand) -} - -//Add github action information -if (System.env.GITHUB_ACTIONS) { - def jobId = System.env.GITHUB_JOB - - buildScan.value('gh-job-id', jobId) - buildScan.value('gh-event-name', System.env.GITHUB_EVENT_NAME) - buildScan.value('gh-ref-name', System.env.GITHUB_REF_NAME) - buildScan.value('gh-actor', System.env.GITHUB_ACTOR) - buildScan.value('gh-workflow', System.env.GITHUB_WORKFLOW) - String jobCustomValues = System.env.GE_CUSTOM_VALUES - if (jobCustomValues != null && !jobCustomValues.isBlank()) { - for (String jobCustomValue : jobCustomValues.split(",")) { - int index = jobCustomValue.indexOf('=') - if (index <= 0) { - continue - } - buildScan.value(jobCustomValue.substring(0, index).trim(), jobCustomValue.substring(index + 1).trim()) - } - } - - List similarBuildsTags = new ArrayList<>() - - buildScan.tag(jobId) - similarBuildsTags.add(jobId) - - buildScan.tag(System.env.GITHUB_EVENT_NAME) - similarBuildsTags.add(System.env.GITHUB_EVENT_NAME) - - buildScan.tag(System.env.GITHUB_WORKFLOW) - similarBuildsTags.add(System.env.GITHUB_WORKFLOW) - - String jobTags = System.env.GE_TAGS - if (jobTags != null && !jobTags.isBlank()) { - for (String tag : jobTags.split(",")) { - buildScan.tag(tag.trim()) - similarBuildsTags.add(tag.trim()) - } - } - - buildScan.link('Workflow run', System.env.GITHUB_SERVER_URL + '/' + System.env.GITHUB_REPOSITORY + '/actions/runs/' + System.env.GITHUB_RUN_ID) - 
- def prNumber = System.env.PULL_REQUEST_NUMBER - if (prNumber != null && !prNumber.isBlank()) { - buildScan.value('gh-pr', prNumber) - buildScan.tag('pr-' + prNumber) - similarBuildsTags.add('pr-' + prNumber) - - buildScan.link('Pull request', System.env.GITHUB_SERVER_URL + '/' + System.env.GITHUB_REPOSITORY + '/pull/' + prNumber ) - } - - similarBuildsTags.add(System.env.RUNNER_OS) - - buildScan.link('Similar builds', 'https://ge.quarkus.io/scans?search.tags=' + java.net.URLEncoder.encode(String.join(",", similarBuildsTags), "UTF-8").replace("+", "%20")) - - buildScan.buildScanPublished { publishedBuildScan -> { - File target = new File("target") - if (!target.exists()) { - target.mkdir() - } - File gradleBuildScanUrlFile = new File("target/gradle-build-scan-url.txt") - if (!gradleBuildScanUrlFile.exists()) { - gradleBuildScanUrlFile.withWriter { out -> - out.print(publishedBuildScan.buildScanUri) - } - } - new File(System.env.GITHUB_STEP_SUMMARY).withWriterAppend { out -> - out.println("\n[Build scan](${publishedBuildScan.buildScanUri})\n`${mvnCommand}`\n\n") - } - } - } -} - -// Check runtime Maven version and Maven Wrapper version are aligned -def runtimeInfo = (org.apache.maven.rtinfo.RuntimeInformation) session.lookup("org.apache.maven.rtinfo.RuntimeInformation") -def runtimeMavenVersion = runtimeInfo?.getMavenVersion() -Properties mavenWrapperProperties = new Properties() -File mavenWrapperPropertiesFile = new File(".mvn/wrapper/maven-wrapper.properties") -if(mavenWrapperPropertiesFile.exists()) { - mavenWrapperPropertiesFile.withInputStream { - mavenWrapperProperties.load(it) - } - // assuming the wrapper properties contains: - // distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/VERSION/apache-maven-VERSION-bin.zip - if(regexp = mavenWrapperProperties."distributionUrl" =~ /.*\/apache-maven-(.*)-bin\.zip/) { - def wrapperMavenVersion = regexp.group(1) - if (runtimeMavenVersion && wrapperMavenVersion && wrapperMavenVersion 
!= runtimeMavenVersion) { - log.warn("Maven Wrapper is configured with a different version (" + wrapperMavenVersion + ") than the runtime version (" + runtimeMavenVersion + "). This will negatively impact build consistency and build caching.") - buildScan.tag("misaligned-maven-version") - buildScan.value("wrapper-maven-version", wrapperMavenVersion) - } - } -} diff --git a/.mvn/gradle-enterprise.xml b/.mvn/gradle-enterprise.xml index a9479d5aaebca6..3d9c369b492e2e 100644 --- a/.mvn/gradle-enterprise.xml +++ b/.mvn/gradle-enterprise.xml @@ -26,7 +26,7 @@ - #{env['GRADLE_LOCAL_BUILD_CACHE'] != null and env['RELEASE_GITHUB_TOKEN'] == null and properties['no-build-cache'] == null} + #{env['RELEASE_GITHUB_TOKEN'] == null and properties['no-build-cache'] == null} #{env['RELEASE_GITHUB_TOKEN'] == null and properties['no-build-cache'] == null} diff --git a/.sdkmanrc b/.sdkmanrc index 12624dacd0b266..d5b65f8a6ca4bd 100644 --- a/.sdkmanrc +++ b/.sdkmanrc @@ -1,4 +1,4 @@ # Enable auto-env through the sdkman_auto_env config # Add key=value pairs of SDKs to use below -java=17.0.9-tem +java=17.0.10-tem mvnd=1.0-m7-m39 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e3933a5d32d156..527731b60c7be4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -4,57 +4,56 @@ We try to make it easy, and all contributions, even the smaller ones, are more than welcome. This includes bug reports, fixes, documentation, examples... But first, read this page (including the small print at the end). 
-* [Legal](#legal) -* [Reporting an issue](#reporting-an-issue) -* [Checking an issue is fixed in main](#checking-an-issue-is-fixed-in-main) - + [Using snapshots](#using-snapshots) - + [Building main](#building-main) - + [Updating the version](#updating-the-version) -* [Before you contribute](#before-you-contribute) - + [Code reviews](#code-reviews) - + [Coding Guidelines](#coding-guidelines) - + [Continuous Integration](#continuous-integration) - + [Tests and documentation are not optional](#tests-and-documentation-are-not-optional) -* [Setup](#setup) - + [IDE Config and Code Style](#ide-config-and-code-style) - - [Eclipse Setup](#eclipse-setup) - - [IDEA Setup](#idea-setup) - * [How to work](#how-to-work) - * [`OutOfMemoryError` while importing](#-outofmemoryerror--while-importing) - * [`package sun.misc does not exist` while building](#-package-sunmisc-does-not-exist--while-building) - * [Formatting](#formatting) - + [Gitpod](#gitpod) -* [Build](#build) - + [Workflow tips](#workflow-tips) - - [Building all modules of an extension](#building-all-modules-of-an-extension) - - [Building a single module of an extension](#building-a-single-module-of-an-extension) - - [Building with relocations](#building-with-relocations) - - [Running a single test](#running-a-single-test) - * [Maven Invoker tests](#maven-invoker-tests) - + [Build with multiple threads](#build-with-multiple-threads) - + [Don't build any test modules](#don-t-build-any-test-modules) - - [Automatic incremental build](#automatic-incremental-build) - * [Special case `bom-descriptor-json`](#special-case--bom-descriptor-json-) - * [Usage by CI](#usage-by-ci) -* [Release your own version](#release) -* [Documentation](#documentation) - + [Building the documentation](#building-the-documentation) - + [Referencing a new guide in the index](#referencing-a-new-guide-in-the-index) -* [Usage](#usage) - - [With Maven](#with-maven) - - [With Gradle](#with-gradle) - - + [MicroProfile TCK's](#microprofile-tck-s) - + 
[Test Coverage](#test-coverage) -* [Extensions](#extensions) - + [Descriptions](#descriptions) - + [Update dependencies to extensions](#update-dependencies-to-extensions) - + [Check security vulnerabilities](#check-security-vulnerabilities) -* [The small print](#the-small-print) -* [Frequently Asked Questions](#frequently-asked-questions) - -Table of contents generated with -markdown-toc +- [Legal](#legal) +- [Reporting an issue](#reporting-an-issue) +- [Checking an issue is fixed in main](#checking-an-issue-is-fixed-in-main) + * [Using snapshots](#using-snapshots) + * [Building main](#building-main) + * [Updating the version](#updating-the-version) +- [Before you contribute](#before-you-contribute) + * [Code reviews](#code-reviews) + * [Coding Guidelines](#coding-guidelines) + * [Continuous Integration](#continuous-integration) + * [Tests and documentation are not optional](#tests-and-documentation-are-not-optional) +- [Setup](#setup) + * [IDE Config and Code Style](#ide-config-and-code-style) + + [Eclipse Setup](#eclipse-setup) + + [IDEA Setup](#idea-setup) + - [How to work](#how-to-work) + - [`OutOfMemoryError` while importing](#-outofmemoryerror--while-importing) + - [`package sun.misc does not exist` while building](#-package-sunmisc-does-not-exist--while-building) + - [Formatting](#formatting) + * [Gitpod](#gitpod) +- [Build](#build) + * [Workflow tips](#workflow-tips) + + [Building all modules of an extension](#building-all-modules-of-an-extension) + + [Building a single module of an extension](#building-a-single-module-of-an-extension) + + [Building with relocations](#building-with-relocations) + + [Running a single test](#running-a-single-test) + - [Maven Invoker tests](#maven-invoker-tests) + * [Build with multiple threads](#build-with-multiple-threads) + * [Don't build any test modules](#don-t-build-any-test-modules) + + [Automatic incremental build](#automatic-incremental-build) + - [Special case 
`bom-descriptor-json`](#special-case--bom-descriptor-json-) + - [Usage by CI](#usage-by-ci) + - [Develocity build cache](#develocity-build-cache) +- [Release your own version](#release-your-own-version) +- [Documentation](#documentation) + * [Building the documentation](#building-the-documentation) + * [Referencing a new guide in the index](#referencing-a-new-guide-in-the-index) +- [Usage](#usage) + + [With Maven](#with-maven) + + [With Gradle](#with-gradle) + * [MicroProfile TCK's](#microprofile-tck-s) + * [Test Coverage](#test-coverage) +- [Extensions](#extensions) + * [Descriptions](#descriptions) + * [Update dependencies to extensions](#update-dependencies-to-extensions) + * [Check security vulnerabilities](#check-security-vulnerabilities) +- [The small print](#the-small-print) +- [Frequently Asked Questions](#frequently-asked-questions) + +Table of contents generated with markdown-toc ## Legal @@ -531,21 +530,74 @@ CI is using a slightly different GIB config than locally: For more details see the `Get GIB arguments` step in `.github/workflows/ci-actions-incremental.yml`. -##### Gradle Enterprise build cache +##### Develocity build cache -Quarkus has a Gradle Enterprise setup at https://ge.quarkus.io that can be used to analyze the build performance of the Quarkus project. +###### Getting set up -Locally you can use `-Dgradle.cache.local.enabled=true` to enable the local Gradle Enterprise cache. This can speed up the build significantly. It is still considered experimental but can be used for local development. +Quarkus has a Develocity instance set up at https://ge.quarkus.io that can be used to analyze the build performance of the Quarkus project and also provides build cache services. -If you have a need or interest to report build times, you will need to get an API key for the GE instance. It is mainly relevant for those working on optimizing the Quarkus build. Ping on quarkus-dev mailing list or on Zulip if you need one. 
+If you have an account on https://ge.quarkus.io, this can speed up your local builds significantly. -When you have the account setup you run `mvn gradle-enterprise:provision-access-key` and login - from then on build time info will be sent to the GE instance. -You can alternatively also generate an API key from the GE UI and then use an environment variable like this: +If you have a need or interest to share your build scans and use the build cache, you will need to get an account for the Develocity instance. +It is only relevant for members of the Quarkus team and you should contact either Guillaume Smet or Max Andersen to set up your account. + +When you have the account set up, from the root of your local Quarkus workspace, run: + +``` +./mvnw gradle-enterprise:provision-access-key +``` + +and log in in the browser window it will open (if not already logged in). +Your access key will be stored in the `~/.m2/.gradle-enterprise/keys.properties` file. +From then your build scans will be sent to the Develocity instance and you will be able to benefit from the build cache. + +You can alternatively also generate an API key from the Develocity UI and then use an environment variable like this: ``` export GRADLE_ENTERPRISE_ACCESS_KEY=ge.quarkus.io=a_secret_key ``` +When debugging a test (and especially flaky tests), you might want to temporarily disable the build cache. +You can easily do it by adding `-Dno-build-cache` to your Maven command. + +The remote cache is stored on the Develocity server and is populated by CI. +To be able to benefit from the remote cache, you need to use a Java version tested on CI (at the moment, either 17 or 21) and the same Maven version (thus why it is recommended to use the Maven wrapper aka `./mvnw`). +Note that the local cache alone should bring you a significant speedup. + +The local cache is stored in the `~/.m2/.gradle-enterprise/build-cache/` directory. +If you have problems with your local cache, you can delete this directory. 
+ +###### -Dquickly + +When using `-Dquickly` with no goals, Develocity is unable to detect that the `clean` goal is present. +We worked around it but you will get the following warnings at the beginning of your build output: + +``` +[WARNING] Build cache entries produced by this build may be incorrect since the clean lifecycle is not part of the build invocation. +[WARNING] You must only invoke the build without the clean lifecycle if the build is started from a clean working directory. +``` + +You can safely ignore them. + +###### Benchmarking the build + +During the experiment phase, there might be a need to benchmark the build in a reliable manner. + +For this, we can use the [Gradle Profiler](https://github.com/gradle/gradle-profiler). +It can be installed with SDKMAN! (`sdk install gradleprofiler`) or Homebrew (`brew install gradle-profiler`). + +Then we can run the following commands at the root of the Quarkus project: + +``` +# Without cache +gradle-profiler --maven --benchmark --scenario-file build.scenario clean_install_no_cache + +# With cache +gradle-profiler --maven --benchmark --scenario-file build.scenario clean_install +``` + +Simple HTML reports will be published in the `profile_out*` directories. + ## Release your own version You might want to release your own patched version of Quarkus to an internal repository. 
diff --git a/bom/application/pom.xml b/bom/application/pom.xml index 645b3af610c91f..2d11ea03f1d0a7 100644 --- a/bom/application/pom.xml +++ b/bom/application/pom.xml @@ -14,7 +14,7 @@ pom - 2.0.1 + 2.0.2 1.77 1.0.2.4 1.0.18 @@ -25,7 +25,7 @@ 1 1.1.5 2.1.5.Final - 3.1.1.Final + 3.1.2.Final 6.2.7.Final 0.33.0 0.2.4 @@ -34,8 +34,8 @@ 1.32.0 1.32.0-alpha 1.21.0-alpha - 5.1.0.Final - 1.12.2 + 5.2.0.Final + 1.12.3 2.1.12 0.22.0 21.1 @@ -50,21 +50,21 @@ 2.1 2.0 3.1.1 - 2.2.0 - 3.5.4 + 2.3.0 + 3.6.0 4.1.0 4.0.0 - 3.9.0 + 3.10.0 2.7.0 6.2.6 4.4.0 2.1.0 1.0.13 3.0.1 - 3.8.0 - 4.16.0 + 3.10.0 + 4.18.0 2.5.0 - 2.1.2 + 2.1.3 2.1.1 3.0.0 2.1.0 @@ -94,23 +94,23 @@ 2.16.1 1.0.0.Final 3.14.0 - 1.16.0 + 1.16.1 1.7.0 - 6.4.3.Final - 1.14.7 + 6.4.4.Final + 1.14.11 6.0.6.Final 2.2.2.Final 8.0.1.Final - 7.0.0.Final + 7.1.0.Final 7.0.0.Final - 2.1 + 2.3 8.0.0.Final - 8.12.1 + 8.12.2 2.2.21 2.2.5.Final 2.2.2.Final @@ -119,17 +119,17 @@ 2.0.0.Final 1.7.0.Final 1.0.1.Final - 2.2.3.Final + 2.3.1.Final 3.5.1.Final - 4.5.3 + 4.5.4 4.5.14 4.4.16 4.1.5 9.2.1 2.3.2 2.2.224 - 42.7.1 - 3.3.2 + 42.7.2 + 3.3.3 8.3.0 12.4.2.jre11 1.6.7 @@ -140,28 +140,27 @@ 5.4.0 2.2 5.10.2 - 1.5.0 - 14.0.24.Final - 4.6.5.Final + 15.0.0.CR1 + 5.0.0.CR2 3.1.5 - 4.1.106.Final - 1.12.0 + 4.1.107.Final + 1.14.0 1.0.4 3.5.3.Final - 2.5.6 - 3.6.1 + 2.5.7 + 3.7.0 1.8.0 1.1.10.5 0.100.0 - 2.13.12 + 2.13.13 1.2.3 3.11.3 - 2.15.0 + 2.15.1 2.2.0 1.0.0 1.9.22 - 1.7.3 + 1.8.0 0.27.0 1.6.2 4.1.2 @@ -171,7 +170,7 @@ 9.22.3 3.0.3 - 4.25.1 + 4.26.0 4.24.0 2.2 6.0.0 @@ -189,26 +188,26 @@ 5.8.0 4.13.0 2.0.3.Final - 23.0.4 + 23.0.7 1.15.1 3.42.0 - 2.24.1 - 0.25.0 - 1.43.3 + 2.25.0 + 0.26.0 + 1.44.1 2.1 4.7.5 1.1.0 - 1.25.0 + 1.26.0 1.11.0 2.10.1 1.1.2.Final - 2.22.1 + 2.23.0 1.3.0.Final 1.11.3 2.5.8.Final 0.1.18.Final - 1.19.4 - 3.3.4 + 1.19.6 + 3.3.5 2.0.0 1.4.4 @@ -2075,6 +2074,16 @@ quarkus-websockets-client-deployment ${project.version} + + io.quarkus + quarkus-websockets-next + ${project.version} + + + io.quarkus + 
quarkus-websockets-next-deployment + ${project.version} + io.quarkus quarkus-undertow-spi @@ -5241,11 +5250,6 @@ elasticsearch-rest-client-sniffer ${elasticsearch-opensource-components.version} - - org.junit-pioneer - junit-pioneer - ${junit-pioneer.version} - org.jacoco org.jacoco.core @@ -5405,7 +5409,7 @@ org.infinispan - infinispan-client-hotrod-jakarta + infinispan-client-hotrod ${infinispan.version} @@ -5448,7 +5452,7 @@ org.infinispan - infinispan-commons-jakarta + infinispan-commons ${infinispan.version} diff --git a/bom/dev-ui/pom.xml b/bom/dev-ui/pom.xml index be2f1cc71d1aee..677f6762fb5102 100644 --- a/bom/dev-ui/pom.xml +++ b/bom/dev-ui/pom.xml @@ -27,9 +27,9 @@ 1.4.0 1.7.5 1.7.0 - 5.4.3 - 2.1.0 - 1.8.2 + 5.5.0 + 1.0.12 + 1.8.3 2.4.0 2.15.3 @@ -262,12 +262,12 @@ - org.mvnpm.at.vanillawc - wc-codemirror - ${wc-codemirror.version} + org.mvnpm.at.mvnpm + codeblock + ${codeblock.version} runtime - + org.mvnpm diff --git a/build-parent/pom.xml b/build-parent/pom.xml index 3659002a1b0856..1ca9aff872c000 100644 --- a/build-parent/pom.xml +++ b/build-parent/pom.xml @@ -75,7 +75,7 @@ and unfortunately annotation processors are not covered by dependency management in kotlin-maven-plugin; see https://github.com/quarkusio/quarkus/issues/37477#issuecomment-1923662964 --> - 6.4.3.Final + 6.4.4.Final 4.13.0 @@ -83,19 +83,20 @@ :Z - 8.9.1 + 8.12.1 docker.io/elastic/elasticsearch:${elasticsearch-server.version} docker.io/elastic/logstash:${elasticsearch-server.version} docker.io/elastic/kibana:${elasticsearch-server.version} http - 2.9.0 + 2.11.1 docker.io/opensearchproject/opensearch:${opensearch-server.version} http + 2.2.0 docker.io/postgres:14 docker.io/mariadb:10.11 - docker.io/ibmcom/db2:11.5.7.0a + icr.io/db2_community/db2:11.5.9.0 mcr.microsoft.com/mssql/server:2022-latest docker.io/mysql:8.0 docker.io/gvenzl/oracle-free:23-slim-faststart @@ -106,7 +107,7 @@ - 23.0.4 + 23.0.7 19.0.3 quay.io/keycloak/keycloak:${keycloak.version} 
quay.io/keycloak/keycloak:${keycloak.wildfly.version}-legacy @@ -115,17 +116,17 @@ 3.25.3 - 3.3.1 + 3.4.1 7.3.0 - 2.31.2 + 2.32.0 2.0.0 - 0.43.4 + 0.44.0 2.23.0 1.9.0 3.6.0 @@ -160,8 +161,6 @@ sh ${maven.multiModuleProjectDirectory}/.github/docker-prune.${script.extension} - ${enforcer.skip} - ${surefire.argLine.additional} 1.7.0 @@ -291,6 +290,12 @@ ${assertj.version} test + + org.junit-pioneer + junit-pioneer + ${junit-pioneer.version} + test + org.asciidoctor @@ -373,6 +378,13 @@ + + me.escoffier.certs + certificate-generator-junit5 + 0.4.3 + test + + @@ -501,71 +513,21 @@ - - classpath:enforcer-rules/quarkus-require-java-version.xml - - - classpath:enforcer-rules/quarkus-require-maven-version.xml - classpath:enforcer-rules/quarkus-banned-dependencies.xml classpath:enforcer-rules/quarkus-banned-dependencies-okhttp.xml + + classpath:enforcer-rules/quarkus-banned-dependencies-test.xml + enforce - - enforce-test-deps-scope - - - - - io.quarkus:quarkus-test-* - io.rest-assured:* - org.assertj:* - junit:junit - - - io.quarkus:quarkus-test-*:*:*:test - io.rest-assured:*:*:*:test - org.assertj:*:*:*:test - junit:junit:*:*:test - - Found test dependencies with wrong scope: - - - ${enforce-test-deps-scope.skip} - - - enforce - - - - enforce-test-deps-junit-scope - - - - false - - org.junit.jupiter:* - - - org.junit.jupiter:*:*:*:test - - Found JUnit dependencies with wrong scope: - - - ${enforce-test-deps-scope.skip} - - - enforce - - @@ -712,7 +674,6 @@ org.apache.maven.plugins maven-plugin-plugin - ${maven-plugin-plugin.version} true diff --git a/build.scenario b/build.scenario new file mode 100644 index 00000000000000..cc7794fb99ec7f --- /dev/null +++ b/build.scenario @@ -0,0 +1,40 @@ +clean_install { + tasks = ["clean","install","-Dquickly", "-T6"] + cleanup-tasks = ["clean"] + maven { + targets = ["clean","install","-Dquickly", "-T6"] + } + warm-ups = 1 + iterations = 3 +} + +clean_install_no_scan { + tasks = ["clean","install","-Dquickly", "-T6", "-Dscan=false"] + 
cleanup-tasks = ["clean"] + maven { + targets = ["clean","install","-Dquickly", "-T6", "-Dscan=false"] + } + warm-ups = 1 + iterations = 3 +} + +clean_install_no_cache { + tasks = ["clean","install","-Dquickly", "-T6", "-Dno-build-cache"] + cleanup-tasks = ["clean"] + maven { + targets = ["clean","install","-Dquickly", "-T6", "-Dno-build-cache"] + } + warm-ups = 1 + iterations = 3 +} + +clean_install_no_cache_no_scan { + tasks = ["clean","install","-Dquickly", "-T6", "-Dno-build-cache", "-Dscan=false"] + cleanup-tasks = ["clean"] + maven { + targets = ["clean","install","-Dquickly", "-T6", "-Dno-build-cache", "-Dscan=false"] + } + warm-ups = 1 + iterations = 3 +} + diff --git a/core/deployment/pom.xml b/core/deployment/pom.xml index d4b13b09d9a8d3..9abc0fadd2204f 100644 --- a/core/deployment/pom.xml +++ b/core/deployment/pom.xml @@ -153,21 +153,30 @@ + org.apache.maven.plugins maven-enforcer-plugin - enforce-test-deps-junit-scope - - true - + enforce enforce - - - enforce-quarkus-core-deployment - + + + + + classpath:enforcer-rules/quarkus-banned-dependencies.xml + + + classpath:enforcer-rules/quarkus-banned-dependencies-okhttp.xml + + @@ -180,9 +189,6 @@ - - enforce - @@ -214,30 +220,6 @@ - - - - com.gradle - gradle-enterprise-maven-extension - - - - - - maven-compiler-plugin - - the extension config doc generation tool shares data across all extensions - - - - - - - - diff --git a/core/deployment/src/main/java/io/quarkus/deployment/BootstrapConfig.java b/core/deployment/src/main/java/io/quarkus/deployment/BootstrapConfig.java index 12448f389fcd04..8f3e059be79f02 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/BootstrapConfig.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/BootstrapConfig.java @@ -24,6 +24,13 @@ public class BootstrapConfig { @ConfigItem(defaultValue = "false") Boolean workspaceDiscovery; + /** + * If set to true, workspace loader will log warnings for modules that could not be loaded for some reason + * instead of 
throwing errors. + */ + @ConfigItem(defaultValue = "false") + boolean warnOnFailingWorkspaceModules; + /** * By default, the bootstrap mechanism will create a shared cache of open JARs for * Quarkus classloaders to reduce the total number of opened ZIP FileSystems in dev and test modes. diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ReflectiveHierarchyBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ReflectiveHierarchyBuildItem.java index 77ed4fba53079b..bd13ee0931508e 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ReflectiveHierarchyBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ReflectiveHierarchyBuildItem.java @@ -152,6 +152,49 @@ public String getSource() { return source; } + /** + * Creates a new {@link Builder} instance, using the specified class for the underlying {@link Type} which hierarchy is to + * be be registered for reflection. + * + * @param clazz the Class which hierarchy is to be registered for reflection + * @return a new {@link Builder} instance, initialized from the specified Class + */ + public static Builder builder(Class clazz) { + return builder(clazz.getName()); + } + + /** + * Creates a new {@link Builder} instance, using the specified class for the underlying {@link Type} which hierarchy is to + * be be registered for reflection. + * + * @param className the name of the Class which hierarchy is to be registered for reflection + * @return a new {@link Builder} instance, initialized from the specified Class + */ + public static Builder builder(String className) { + return builder(DotName.createSimple(className)); + } + + /** + * Creates a new {@link Builder} instance, using the specified class for the underlying {@link Type} which hierarchy is to + * be be registered for reflection. 
+ * + * @param className the {@link DotName} of the Class which hierarchy is to be registered for reflection + * @return a new {@link Builder} instance, initialized from the specified Class + */ + public static Builder builder(DotName className) { + return builder(Type.create(className, Type.Kind.CLASS)); + } + + /** + * Creates a new {@link Builder} instance, initializing it with the specified {@link Type} + * + * @param type the {@link Type} which hierarchy is to be registered for reflection + * @return a new {@link Builder} instance, initialized from the specified {@link Type} + */ + public static Builder builder(Type type) { + return new Builder().type(type); + } + public static class Builder { private Type type; @@ -167,6 +210,26 @@ public Builder type(Type type) { return this; } + /** + * Derives the target {@link Type} to be registered from the specified class name. + * + * @param className a {@link DotName} representing the name of the class of the Type to be registered for reflection + * @return this {@link Builder} instance + */ + public Builder className(DotName className) { + return type(Type.create(className, Type.Kind.CLASS)); + } + + /** + * Derives the target {@link Type} to be registered from the specified class name. 
+ * + * @param className the name of the class of the Type to be registered for reflection + * @return this {@link Builder} instance + */ + public Builder className(String className) { + return className(DotName.createSimple(className)); + } + public Builder index(IndexView index) { this.index = index; return this; diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java index 4266d04727a6eb..eab173ac613a86 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java @@ -926,9 +926,11 @@ private void copyDependency(Set parentFirstArtifacts, OutputTargetB } } if (removedFromThisArchive.isEmpty()) { - Files.copy(resolvedDep, targetPath, StandardCopyOption.REPLACE_EXISTING); + Files.copy(resolvedDep, targetPath, StandardCopyOption.REPLACE_EXISTING, + StandardCopyOption.COPY_ATTRIBUTES); } else { - //we have removed classes, we need to handle them correctly + // we copy jars for which we remove entries to the same directory + // which seems a bit odd to me filterZipFile(resolvedDep, targetPath, removedFromThisArchive); } } @@ -1251,6 +1253,8 @@ private void filterZipFile(Path resolvedDep, Path targetPath, Set transf } } } + // let's make sure we keep the original timestamp + Files.setLastModifiedTime(targetPath, Files.getLastModifiedTime(resolvedDep)); } } catch (IOException e) { throw new RuntimeException(e); diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/UpxCompressionBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/UpxCompressionBuildStep.java index 7ab2f19910dd10..237e18cafa3445 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/UpxCompressionBuildStep.java +++ 
b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/UpxCompressionBuildStep.java @@ -7,6 +7,7 @@ import java.util.Collections; import java.util.List; import java.util.Optional; +import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -15,7 +16,6 @@ import org.jboss.logging.Logger; import io.quarkus.deployment.annotations.BuildProducer; -import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.pkg.NativeConfig; import io.quarkus.deployment.pkg.builditem.ArtifactResultBuildItem; import io.quarkus.deployment.pkg.builditem.NativeImageBuildItem; @@ -34,7 +34,6 @@ public class UpxCompressionBuildStep { */ private static final String PATH = "PATH"; - @BuildStep(onlyIf = NativeBuild.class) public void compress(NativeConfig nativeConfig, NativeImageRunnerBuildItem nativeImageRunner, NativeImageBuildItem image, BuildProducer upxCompressedProducer, @@ -70,11 +69,13 @@ public void compress(NativeConfig nativeConfig, NativeImageRunnerBuildItem nativ } private boolean runUpxFromHost(File upx, File executable, NativeConfig nativeConfig) { - String level = getCompressionLevel(nativeConfig.compression().level().getAsInt()); List extraArgs = nativeConfig.compression().additionalArgs().orElse(Collections.emptyList()); - List args = Stream.concat( - Stream.concat(Stream.of(upx.getAbsolutePath(), level), extraArgs.stream()), + List args = Stream.of( + Stream.of(upx.getAbsolutePath()), + nativeConfig.compression().level().stream().mapToObj(this::getCompressionLevel), + extraArgs.stream(), Stream.of(executable.getAbsolutePath())) + .flatMap(Function.identity()) .collect(Collectors.toList()); log.infof("Executing %s", String.join(" ", args)); final ProcessBuilder processBuilder = new ProcessBuilder(args) @@ -104,7 +105,6 @@ private boolean runUpxFromHost(File upx, File executable, NativeConfig nativeCon private boolean runUpxInContainer(NativeImageBuildItem nativeImage, NativeConfig nativeConfig, String 
effectiveBuilderImage) { - String level = getCompressionLevel(nativeConfig.compression().level().getAsInt()); List extraArgs = nativeConfig.compression().additionalArgs().orElse(Collections.emptyList()); List commandLine = new ArrayList<>(); @@ -140,7 +140,9 @@ private boolean runUpxInContainer(NativeImageBuildItem nativeImage, NativeConfig volumeOutputPath + ":" + NativeImageBuildStep.CONTAINER_BUILD_VOLUME_PATH + ":z"); commandLine.add(effectiveBuilderImage); - commandLine.add(level); + if (nativeConfig.compression().level().isPresent()) { + commandLine.add(getCompressionLevel(nativeConfig.compression().level().getAsInt())); + } commandLine.addAll(extraArgs); commandLine.add(nativeImage.getPath().toFile().getName()); diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/RegisterForReflectionBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/RegisterForReflectionBuildStep.java index 264350bb54b5c2..197145c7bfc0a2 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/steps/RegisterForReflectionBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/RegisterForReflectionBuildStep.java @@ -45,17 +45,18 @@ public void build(CombinedIndexBuildItem combinedIndexBuildItem, Capabilities ca ReflectiveHierarchyBuildItem.Builder builder = new ReflectiveHierarchyBuildItem.Builder(); Set processedReflectiveHierarchies = new HashSet(); + IndexView index = combinedIndexBuildItem.getComputingIndex(); for (AnnotationInstance i : combinedIndexBuildItem.getIndex() .getAnnotations(DotName.createSimple(RegisterForReflection.class.getName()))) { - boolean methods = getBooleanValue(i, "methods"); - boolean fields = getBooleanValue(i, "fields"); - boolean ignoreNested = getBooleanValue(i, "ignoreNested"); - boolean serialization = i.value("serialization") != null && i.value("serialization").asBoolean(); - boolean unsafeAllocated = i.value("unsafeAllocated") != null && i.value("unsafeAllocated").asBoolean(); + 
boolean methods = i.valueWithDefault(index, "methods").asBoolean(); + boolean fields = i.valueWithDefault(index, "fields").asBoolean(); + boolean ignoreNested = i.valueWithDefault(index, "ignoreNested").asBoolean(); + boolean serialization = i.valueWithDefault(index, "serialization").asBoolean(); + boolean unsafeAllocated = i.valueWithDefault(index, "unsafeAllocated").asBoolean(); + boolean registerFullHierarchyValue = i.valueWithDefault(index, "registerFullHierarchy").asBoolean(); AnnotationValue targetsValue = i.value("targets"); - AnnotationValue registerFullHierarchyValue = i.value("registerFullHierarchy"); AnnotationValue classNamesValue = i.value("classNames"); AnnotationValue lambdaCapturingTypesValue = i.value("lambdaCapturingTypes"); @@ -114,14 +115,14 @@ private void registerClass(ClassLoader classLoader, String className, boolean me boolean ignoreNested, boolean serialization, boolean unsafeAllocated, final BuildProducer reflectiveClass, BuildProducer reflectiveClassHierarchy, Set processedReflectiveHierarchies, - AnnotationValue registerFullHierarchyValue, Builder builder) { + boolean registerFullHierarchyValue, Builder builder) { reflectiveClass.produce(serialization ? 
ReflectiveClassBuildItem.builder(className).serialization().unsafeAllocated(unsafeAllocated).build() : ReflectiveClassBuildItem.builder(className).constructors().methods(methods).fields(fields) .unsafeAllocated(unsafeAllocated).build()); //Search all class hierarchy, fields and methods in order to register its classes for reflection - if (registerFullHierarchyValue != null && registerFullHierarchyValue.asBoolean()) { + if (registerFullHierarchyValue) { registerClassDependencies(reflectiveClassHierarchy, classLoader, processedReflectiveHierarchies, methods, builder, className); } @@ -228,7 +229,4 @@ private static Type getMethodReturnType(IndexView indexView, DotName initialName return methodReturnType; } - private static boolean getBooleanValue(AnnotationInstance i, String name) { - return i.value(name) == null || i.value(name).asBoolean(); - } } diff --git a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTest.java b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTest.java index adaca8f5ead223..8a259cd7269469 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTest.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/runnerjar/ProvidedExtensionDepsTest.java @@ -50,6 +50,7 @@ protected TsArtifact composeApplication() { final TsArtifact depC2 = TsArtifact.jar("dep-c", "2"); // make sure provided dependencies don't override compile/runtime dependencies directProvidedDep.addDependency(depC2); + directProvidedDep.addDependency(extADeploymentDep); final TsArtifact transitiveProvidedDep = TsArtifact.jar("transitive-provided-dep"); directProvidedDep.addDependency(transitiveProvidedDep); @@ -68,7 +69,7 @@ protected void assertAppModel(ApplicationModel model) throws Exception { expected.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-a-deployment", "1"), DependencyFlags.DEPLOYMENT_CP)); expected.add(new 
ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-a-deployment-dep", "1"), - DependencyFlags.DEPLOYMENT_CP)); + DependencyFlags.DEPLOYMENT_CP, DependencyFlags.COMPILE_ONLY)); assertEquals(expected, getDeploymentOnlyDeps(model)); final Set expectedRuntime = new HashSet<>(); @@ -83,7 +84,8 @@ protected void assertAppModel(ApplicationModel model) throws Exception { DependencyFlags.DEPLOYMENT_CP)); expectedRuntime.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "dep-c", "1"), DependencyFlags.RUNTIME_CP, - DependencyFlags.DEPLOYMENT_CP)); + DependencyFlags.DEPLOYMENT_CP, + DependencyFlags.COMPILE_ONLY)); assertEquals(expectedRuntime, getDependenciesWithFlag(model, DependencyFlags.RUNTIME_CP)); final Set expectedCompileOnly = new HashSet<>(); @@ -102,6 +104,13 @@ protected void assertAppModel(ApplicationModel model) throws Exception { .add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "transitive-provided-dep", "1"), JavaScopes.PROVIDED, DependencyFlags.COMPILE_ONLY)); + expectedCompileOnly.add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "dep-c", "1"), + DependencyFlags.RUNTIME_CP, + DependencyFlags.DEPLOYMENT_CP, + DependencyFlags.COMPILE_ONLY)); + expectedCompileOnly + .add(new ArtifactDependency(ArtifactCoords.jar("io.quarkus.bootstrap.test", "ext-a-deployment-dep", "1"), + DependencyFlags.DEPLOYMENT_CP, DependencyFlags.COMPILE_ONLY)); assertEquals(expectedCompileOnly, getDependenciesWithFlag(model, DependencyFlags.COMPILE_ONLY)); final Set compileOnlyPlusRuntime = new HashSet<>(); diff --git a/core/launcher/pom.xml b/core/launcher/pom.xml index e9d538d20e0560..c549657617861a 100644 --- a/core/launcher/pom.xml +++ b/core/launcher/pom.xml @@ -92,7 +92,6 @@ org.apache.maven.plugins maven-jar-plugin - 2023-10-17T10:15:30Z **/LauncherShader.class diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocItemFinder.java 
b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocItemFinder.java index f5e3720291c584..7e550d58ad3ffb 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocItemFinder.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocItemFinder.java @@ -2,6 +2,7 @@ import static io.quarkus.annotation.processor.Constants.ANNOTATION_CONFIG_DOC_DEFAULT; import static io.quarkus.annotation.processor.Constants.ANNOTATION_CONFIG_DOC_ENUM_VALUE; +import static io.quarkus.annotation.processor.Constants.ANNOTATION_CONFIG_DOC_IGNORE; import static io.quarkus.annotation.processor.Constants.ANNOTATION_CONFIG_DOC_MAP_KEY; import static io.quarkus.annotation.processor.Constants.ANNOTATION_CONFIG_DOC_SECTION; import static io.quarkus.annotation.processor.Constants.ANNOTATION_CONFIG_ITEM; @@ -246,6 +247,8 @@ private List recursivelyFindConfigItems(Element element, String r : annotationMirror.getElementValues().values().iterator().next().getValue().toString(); } else if (annotationName.equals(ANNOTATION_CONFIG_WITH_UNNAMED_KEY)) { unnamedMapKey = true; + } else if (annotationName.equals(ANNOTATION_CONFIG_DOC_IGNORE)) { + generateDocumentation = false; } } @@ -390,7 +393,8 @@ private List recursivelyFindConfigItems(Element element, String r configDocKey.setConfigPhase(configPhase); configDocKey.setDefaultValue(defaultValue); configDocKey.setDocMapKey(configDocMapKey); - configDocKey.setConfigDoc(javaDocParser.parseConfigDescription(rawJavaDoc)); + javaDocParser.parseConfigDescription(rawJavaDoc, configDocKey::setConfigDoc, configDocKey::setSince); + configDocKey.setEnvironmentVariable(DocGeneratorUtil.toEnvVarName(name)); configDocKey.setAcceptedValues(acceptedValues); configDocKey.setJavaDocSiteLink(getJavaDocSiteLink(type)); ConfigDocItem configDocItem = new ConfigDocItem(); @@ -628,6 +632,8 @@ private List decorateGroupItems( 
additionalKeys.addAll(additionalNames.stream().map(k -> k + configDocKey.getKey()).collect(toList())); configDocKey.setAdditionalKeys(additionalKeys); configDocKey.setKey(parentName + configDocKey.getKey()); + configDocKey.setEnvironmentVariable( + DocGeneratorUtil.toEnvVarName(parentName) + configDocKey.getEnvironmentVariable()); decoratedItems.add(configDocItem); } else { ConfigDocSection section = configDocItem.getConfigDocSection(); diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocKey.java b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocKey.java index a853a0f92939ec..f4044599f84411 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocKey.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/ConfigDocKey.java @@ -27,6 +27,8 @@ final public class ConfigDocKey implements ConfigDocElement, Comparable ref = new AtomicReference<>(); + parseConfigDescription(javadocComment, ref::set, s -> { + }); + return ref.get(); + } + + public void parseConfigDescription( + String javadocComment, + Consumer javadocTextConsumer, + Consumer sinceConsumer) { + if (javadocComment == null || javadocComment.trim().isEmpty()) { - return Constants.EMPTY; + javadocTextConsumer.accept(Constants.EMPTY); + return; } // the parser expects all the lines to start with "* " @@ -90,10 +105,16 @@ public String parseConfigDescription(String javadocComment) { Javadoc javadoc = StaticJavaParser.parseJavadoc(javadocComment); if (isAsciidoc(javadoc)) { - return handleEolInAsciidoc(javadoc); + javadocTextConsumer.accept(handleEolInAsciidoc(javadoc)); + } else { + javadocTextConsumer.accept(htmlJavadocToAsciidoc(javadoc.getDescription())); } - - return htmlJavadocToAsciidoc(javadoc.getDescription()); + javadoc.getBlockTags().stream() + .filter(t -> t.getType() == Type.SINCE) + .map(JavadocBlockTag::getContent) + .map(JavadocDescription::toText) + 
.findFirst() + .ifPresent(sinceConsumer::accept); } public SectionHolder parseConfigSection(String javadocComment, int sectionLevel) { diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/MavenConfigDocBuilder.java b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/MavenConfigDocBuilder.java index 16d3c31a232052..a5b1fcf98cc0e6 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/MavenConfigDocBuilder.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/MavenConfigDocBuilder.java @@ -79,11 +79,8 @@ public void addParam(String type, String name, String defaultValue, boolean requ configDocKey.setAdditionalKeys(List.of(name)); configDocKey.setConfigPhase(ConfigPhase.RUN_TIME); configDocKey.setDefaultValue(defaultValue == null ? Constants.EMPTY : defaultValue); - if (description != null && !description.isBlank()) { - configDocKey.setConfigDoc(javaDocParser.parseConfigDescription(description)); - } else { - configDocKey.setConfigDoc(EMPTY); - } + javaDocParser.parseConfigDescription(description, configDocKey::setConfigDoc, configDocKey::setSince); + configDocKey.setEnvironmentVariable(DocGeneratorUtil.toEnvVarName(name)); configDocKey.setOptional(!required); final ConfigDocItem configDocItem = new ConfigDocItem(); configDocItem.setConfigDocKey(configDocKey); diff --git a/core/processor/src/test/java/io/quarkus/annotation/processor/generate_doc/JavaDocConfigDescriptionParserTest.java b/core/processor/src/test/java/io/quarkus/annotation/processor/generate_doc/JavaDocConfigDescriptionParserTest.java index f23705f230e63e..60d4200fe52a36 100644 --- a/core/processor/src/test/java/io/quarkus/annotation/processor/generate_doc/JavaDocConfigDescriptionParserTest.java +++ b/core/processor/src/test/java/io/quarkus/annotation/processor/generate_doc/JavaDocConfigDescriptionParserTest.java @@ -3,6 +3,7 @@ import static 
org.junit.jupiter.api.Assertions.assertEquals; import java.util.Collections; +import java.util.concurrent.atomic.AtomicReference; import org.asciidoctor.Asciidoctor.Factory; import org.junit.jupiter.api.BeforeEach; @@ -252,6 +253,15 @@ public void parseJavaDocWithCodeBlock() { // parser.parseConfigDescription("Example:\n\n
{@code\nfoo\nbar\n}
")); } + @Test + public void since() { + AtomicReference javadoc = new AtomicReference<>(); + AtomicReference since = new AtomicReference<>(); + parser.parseConfigDescription("Javadoc text\n\n@since 1.2.3", javadoc::set, since::set); + assertEquals("Javadoc text", javadoc.get()); + assertEquals("1.2.3", since.get()); + } + @Test public void asciidoc() { String asciidoc = "== My Asciidoc\n" + diff --git a/core/runtime/pom.xml b/core/runtime/pom.xml index f7cde7edbb5b3b..0fd1e0e2a6a10a 100644 --- a/core/runtime/pom.xml +++ b/core/runtime/pom.xml @@ -272,30 +272,6 @@ - - - - com.gradle - gradle-enterprise-maven-extension - - - - - - maven-compiler-plugin - - the extension config doc generation tool shares data across all extensions - - - - - - - - diff --git a/core/runtime/src/main/java/io/quarkus/runtime/annotations/RegisterForReflection.java b/core/runtime/src/main/java/io/quarkus/runtime/annotations/RegisterForReflection.java index 8cf279a84f64b1..f08da15fcdd9be 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/annotations/RegisterForReflection.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/annotations/RegisterForReflection.java @@ -7,9 +7,12 @@ /** * Annotation that can be used to force a class to be registered for reflection in native image mode. - * Note that by default nested classes and interfaces are not registered, unless {@link #ignoreNested()} is set to false. - * Similarly, by default only the class itself is registered, not the full class hierarchy. This can be changed by setting - * {@link #registerFullHierarchy()} to true. + * Note that by default the class itself is registered including nested classes and interfaces, + * but not the full class hierarchy. This can be changed by setting: + *
    + *
  • {@link #ignoreNested()} to true, to ignore nested classes.
  • + *
  • {@link #registerFullHierarchy()} to true, to register the full hierarchy.
  • + *
*/ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) @@ -26,11 +29,10 @@ boolean fields() default true; /** - * If nested classes/interfaces should be ignored/registered - * - * This is useful when it's necessary to register inner (especially private) classes for Reflection. + * If nested classes/interfaces should be ignored. + * By default, nested classes are registered. To ignore them set it to true. */ - boolean ignoreNested() default true; + boolean ignoreNested() default false; /** * Alternative classes that should actually be registered for reflection instead of the current class. diff --git a/devtools/bom-descriptor-json/pom.xml b/devtools/bom-descriptor-json/pom.xml index bdc9ab6b66b464..dcc5e857a1f9c5 100644 --- a/devtools/bom-descriptor-json/pom.xml +++ b/devtools/bom-descriptor-json/pom.xml @@ -2891,6 +2891,19 @@
+ + io.quarkus + quarkus-websockets-next + ${project.version} + pom + test + + + * + * + + + diff --git a/devtools/cli/src/main/java/io/quarkus/cli/QuarkusCli.java b/devtools/cli/src/main/java/io/quarkus/cli/QuarkusCli.java index 82fd27e2c7542e..e2a01748c60054 100644 --- a/devtools/cli/src/main/java/io/quarkus/cli/QuarkusCli.java +++ b/devtools/cli/src/main/java/io/quarkus/cli/QuarkusCli.java @@ -101,6 +101,7 @@ public int run(String... args) throws Exception { //When running tests the cli should not prompt for user input. boolean interactiveMode = Arrays.stream(args).noneMatch(arg -> arg.equals("--cli-test")); Optional testDir = Arrays.stream(args).dropWhile(arg -> !arg.equals("--cli-test-dir")).skip(1).findFirst(); + boolean noCommand = args.length == 0 || args[0].startsWith("-"); boolean helpCommand = Arrays.stream(args).anyMatch(arg -> arg.equals("--help")); boolean pluginCommand = args.length >= 1 && (args[0].equals("plug") || args[0].equals("plugin")); @@ -111,7 +112,7 @@ public int run(String... args) throws Exception { // If the command already exists and is not a help command (that lists subcommands) or plugin command, then just execute // without dealing with plugins. // The reason that we check if its a plugin command is that plugin commands need PluginManager initialization. - if (existingCommand && !helpCommand && !pluginCommand) { + if (existingCommand && !noCommand && !helpCommand && !pluginCommand) { return cmd.execute(args); } PluginCommandFactory pluginCommandFactory = new PluginCommandFactory(output); @@ -119,14 +120,15 @@ public int run(String... 
args) throws Exception { pluginManager.syncIfNeeded(); Map plugins = new HashMap<>(pluginManager.getInstalledPlugins()); pluginCommandFactory.populateCommands(cmd, plugins); - missingCommand.ifPresent(m -> { + missingCommand.filter(m -> !plugins.containsKey(m)).ifPresent(m -> { try { + output.info("Command %s is not available, looking for available plugins ...", m); Map installable = pluginManager.getInstallablePlugins(); if (installable.containsKey(m)) { Plugin candidate = installable.get(m); PluginListItem item = new PluginListItem(false, candidate); PluginListTable table = new PluginListTable(List.of(item)); - output.info("Command %s not installed but the following plugin is available:\n%s", m, + output.info("Plugin %s is available:\n%s", m, table.getContent()); if (interactiveMode && Prompt.yesOrNo(true, "Would you like to install it now?", diff --git a/devtools/cli/src/main/java/io/quarkus/cli/core/Reflections.java b/devtools/cli/src/main/java/io/quarkus/cli/core/Reflections.java index 2fe0dffa125d36..b3e6eb35849de0 100644 --- a/devtools/cli/src/main/java/io/quarkus/cli/core/Reflections.java +++ b/devtools/cli/src/main/java/io/quarkus/cli/core/Reflections.java @@ -39,6 +39,6 @@ org.eclipse.aether.internal.impl.SimpleLocalRepositoryManagerFactory.class, org.eclipse.aether.internal.impl.collect.DefaultDependencyCollector.class, org.eclipse.aether.transport.wagon.WagonTransporterFactory.class -}) +}, ignoreNested = true) public class Reflections { } diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/extension/QuarkusPluginExtension.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/extension/QuarkusPluginExtension.java index 6f7df0aad8e79c..718a53f8798f94 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/extension/QuarkusPluginExtension.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/extension/QuarkusPluginExtension.java @@ 
-1,5 +1,7 @@ package io.quarkus.gradle.extension; +import static io.quarkus.runtime.LaunchMode.*; + import java.io.File; import java.nio.file.Path; import java.nio.file.Paths; @@ -38,6 +40,7 @@ import io.quarkus.gradle.tasks.QuarkusGradleUtils; import io.quarkus.gradle.tooling.ToolingUtils; import io.quarkus.runtime.LaunchMode; +import io.smallrye.config.SmallRyeConfig; public abstract class QuarkusPluginExtension extends AbstractQuarkusExtension { private final SourceSetExtension sourceSetExtension; @@ -67,10 +70,14 @@ public void manifest(Action action) { public void beforeTest(Test task) { try { - final Map props = task.getSystemProperties(); + Map props = task.getSystemProperties(); + ApplicationModel appModel = getApplicationModel(TEST); + + SmallRyeConfig config = buildEffectiveConfiguration(appModel.getAppArtifact()).getConfig(); + config.getOptionalValue(TEST.getProfileKey(), String.class) + .ifPresent(value -> props.put(TEST.getProfileKey(), value)); - final ApplicationModel appModel = getApplicationModel(LaunchMode.TEST); - final Path serializedModel = ToolingUtils.serializeAppModel(appModel, task, true); + Path serializedModel = ToolingUtils.serializeAppModel(appModel, task, true); props.put(BootstrapConstants.SERIALIZED_TEST_APP_MODEL, serializedModel.toString()); StringJoiner outputSourcesDir = new StringJoiner(","); @@ -79,10 +86,10 @@ public void beforeTest(Test task) { } props.put(BootstrapConstants.OUTPUT_SOURCES_DIR, outputSourcesDir.toString()); - final SourceSetContainer sourceSets = getSourceSets(); - final SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME); + SourceSetContainer sourceSets = getSourceSets(); + SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME); - final File outputDirectoryAsFile = getLastFile(mainSourceSet.getOutput().getClassesDirs()); + File outputDirectoryAsFile = getLastFile(mainSourceSet.getOutput().getClassesDirs()); Path projectDirPath = projectDir.toPath(); @@ 
-167,7 +174,7 @@ public Set combinedOutputSourceDirs() { } public AppModelResolver getAppModelResolver() { - return getAppModelResolver(LaunchMode.NORMAL); + return getAppModelResolver(NORMAL); } public AppModelResolver getAppModelResolver(LaunchMode mode) { @@ -175,7 +182,7 @@ public AppModelResolver getAppModelResolver(LaunchMode mode) { } public ApplicationModel getApplicationModel() { - return getApplicationModel(LaunchMode.NORMAL); + return getApplicationModel(NORMAL); } public ApplicationModel getApplicationModel(LaunchMode mode) { diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/AbstractQuarkusExtension.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/AbstractQuarkusExtension.java index f2696a9266f233..85f67e430dfc16 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/AbstractQuarkusExtension.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/AbstractQuarkusExtension.java @@ -1,12 +1,17 @@ package io.quarkus.gradle.tasks; import static io.quarkus.gradle.tasks.QuarkusGradleUtils.getSourceSet; +import static io.smallrye.common.expression.Expression.Flag.DOUBLE_COLON; +import static io.smallrye.common.expression.Expression.Flag.LENIENT_SYNTAX; +import static io.smallrye.common.expression.Expression.Flag.NO_SMART_BRACES; +import static io.smallrye.common.expression.Expression.Flag.NO_TRIM; import static java.util.Collections.emptyList; import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -24,6 +29,7 @@ import io.quarkus.gradle.dsl.Manifest; import io.quarkus.maven.dependency.ResolvedDependency; +import io.smallrye.common.expression.Expression; /** * This base class exists to hide internal properties, make those only available in the {@link 
io.quarkus.gradle.tasks} @@ -138,7 +144,7 @@ private EffectiveConfig buildEffectiveConfiguration(Map properti * @param appArtifact the application dependency to retrive the quarkus application name and version. * @return a filtered view of the configuration only with quarkus. names. */ - protected Map buildSystemProperties(ResolvedDependency appArtifact) { + protected Map buildSystemProperties(ResolvedDependency appArtifact, Map quarkusProperties) { Map buildSystemProperties = new HashMap<>(); buildSystemProperties.putIfAbsent("quarkus.application.name", appArtifact.getArtifactId()); buildSystemProperties.putIfAbsent("quarkus.application.version", appArtifact.getVersion()); @@ -158,6 +164,33 @@ protected Map buildSystemProperties(ResolvedDependency appArtifa buildSystemProperties.put(entry.getKey(), entry.getValue().toString()); } } + + Set quarkusValues = new HashSet<>(); + quarkusValues.addAll(quarkusProperties.values()); + quarkusValues.addAll(buildSystemProperties.values()); + + for (String value : quarkusValues) { + Expression expression = Expression.compile(value, LENIENT_SYNTAX, NO_TRIM, NO_SMART_BRACES, DOUBLE_COLON); + for (String reference : expression.getReferencedStrings()) { + String expanded = forcedPropertiesProperty.get().get(reference); + if (expanded != null) { + buildSystemProperties.put(reference, expanded); + continue; + } + + expanded = quarkusBuildProperties.get().get(reference); + if (expanded != null) { + buildSystemProperties.put(reference, expanded); + continue; + } + + expanded = (String) project.getProperties().get(reference); + if (expanded != null) { + buildSystemProperties.put(reference, expanded); + } + } + } + return buildSystemProperties; } diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/BaseConfig.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/BaseConfig.java index 90c229c043b9b3..5e0d9533e92dcf 100644 --- 
a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/BaseConfig.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/BaseConfig.java @@ -21,20 +21,20 @@ final class BaseConfig { private final Manifest manifest; private final PackageConfig packageConfig; - private final Map configMap; + private final Map values; // Note: EffectiveConfig has all the code to load the configurations from all the sources. BaseConfig(EffectiveConfig config) { manifest = new Manifest(); packageConfig = new PackageConfig(); - ConfigInstantiator.handleObject(packageConfig, config.config()); + ConfigInstantiator.handleObject(packageConfig, config.getConfig()); // populate the Gradle Manifest object manifest.attributes(packageConfig.manifest.attributes); packageConfig.manifest.manifestSections.forEach((section, attribs) -> manifest.attributes(attribs, section)); - configMap = config.configMap(); + values = config.getValues(); } PackageConfig packageConfig() { @@ -53,7 +53,7 @@ Map cachingRelevantProperties(List propertyPatterns) { List patterns = propertyPatterns.stream().map(s -> "^(" + s + ")$").map(Pattern::compile) .collect(Collectors.toList()); Predicate> keyPredicate = e -> patterns.stream().anyMatch(p -> p.matcher(e.getKey()).matches()); - return configMap.entrySet().stream() + return values.entrySet().stream() .filter(keyPredicate) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/Deploy.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/Deploy.java index 132d956279771e..d5b13e65e4a06c 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/Deploy.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/Deploy.java @@ -90,9 +90,8 @@ public Deploy() { @TaskAction public void checkRequiredExtensions() { 
ApplicationModel appModel = resolveAppModelForBuild(); - Map configMap = extension().buildEffectiveConfiguration(appModel.getAppArtifact()).configMap(); Properties sysProps = new Properties(); - sysProps.putAll(configMap); + sysProps.putAll(extension().buildEffectiveConfiguration(appModel.getAppArtifact()).getValues()); try (CuratedApplication curatedApplication = QuarkusBootstrap.builder() .setBaseClassLoader(getClass().getClassLoader()) .setExistingModel(appModel) diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/EffectiveConfig.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/EffectiveConfig.java index 37cff3614d7795..1c67c52ac46379 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/EffectiveConfig.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/EffectiveConfig.java @@ -14,6 +14,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Supplier; import org.eclipse.microprofile.config.spi.ConfigSource; import org.eclipse.microprofile.config.spi.ConfigSourceProvider; @@ -26,6 +27,7 @@ import io.quarkus.runtime.configuration.ConfigUtils; import io.smallrye.config.AbstractLocationConfigSourceLoader; import io.smallrye.config.EnvConfigSource; +import io.smallrye.config.Expressions; import io.smallrye.config.PropertiesConfigSource; import io.smallrye.config.PropertiesConfigSourceProvider; import io.smallrye.config.SmallRyeConfig; @@ -41,10 +43,9 @@ * Eventually used to construct a map with the effective config options from all the sources above and expose * the Quarkus config objects like {@link PackageConfig}, {@link ClassLoadingConfig} and the underlying {@link SmallRyeConfig}. 
*/ -final class EffectiveConfig { - private final Map fullConfig; - +public final class EffectiveConfig { private final SmallRyeConfig config; + private final Map values; private EffectiveConfig(Builder builder) { List configSources = new ArrayList<>(); @@ -81,13 +82,17 @@ private EffectiveConfig(Builder builder) { .addAll(PropertiesConfigSourceProvider.classPathSources(META_INF_MICROPROFILE_CONFIG_PROPERTIES, classLoader)); this.config = buildConfig(builder.profile, configSources); - this.fullConfig = generateFullConfigMap(config); + this.values = generateFullConfigMap(config); } - SmallRyeConfig config() { + public SmallRyeConfig getConfig() { return config; } + public Map getValues() { + return values; + } + private Map asStringMap(Map map) { Map target = new HashMap<>(); map.forEach((k, v) -> { @@ -100,14 +105,19 @@ private Map asStringMap(Map map) { @VisibleForTesting static Map generateFullConfigMap(SmallRyeConfig config) { - Map map = new HashMap<>(); - config.getPropertyNames().forEach(property -> { - String v = config.getConfigValue(property).getValue(); - if (v != null) { - map.put(property, v); + return Expressions.withoutExpansion(new Supplier>() { + @Override + public Map get() { + Map properties = new HashMap<>(); + for (String propertyName : config.getPropertyNames()) { + String value = config.getRawValue(propertyName); + if (value != null) { + properties.put(propertyName, value); + } + } + return unmodifiableMap(properties); } }); - return unmodifiableMap(map); } @VisibleForTesting @@ -126,10 +136,6 @@ static Builder builder() { return new Builder(); } - Map configMap() { - return fullConfig; - } - static final class Builder { private Map buildProperties = emptyMap(); private Map projectProperties = emptyMap(); diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusBuildDependencies.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusBuildDependencies.java index 
a410e1af412703..d18dd3c70636f7 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusBuildDependencies.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusBuildDependencies.java @@ -24,6 +24,7 @@ import io.quarkus.maven.dependency.ArtifactKey; import io.quarkus.maven.dependency.DependencyFlags; import io.quarkus.maven.dependency.ResolvedDependency; +import io.smallrye.config.SmallRyeConfig; /** * Collect the Quarkus app dependencies, the contents of the {@code quarkus-app/lib} folder, without making the task @@ -139,22 +140,23 @@ private void jarDependencies(Path libBoot, Path libMain) { } ApplicationModel appModel = resolveAppModelForBuild(); - Map configMap = extension().buildEffectiveConfiguration(appModel.getAppArtifact()).configMap(); + SmallRyeConfig config = extension().buildEffectiveConfiguration(appModel.getAppArtifact()).getConfig(); // see https://quarkus.io/guides/class-loading-reference#configuring-class-loading - Set removedArtifacts = java.util.Optional.ofNullable( - configMap.getOrDefault(CLASS_LOADING_REMOVED_ARTIFACTS, null)) + Set removedArtifacts = config.getOptionalValue(CLASS_LOADING_REMOVED_ARTIFACTS, String.class) .map(QuarkusBuildDependencies::dependenciesListToArtifactKeySet) .orElse(Collections.emptySet()); - getLogger().info("Removed artifacts: {}", configMap.getOrDefault(CLASS_LOADING_REMOVED_ARTIFACTS, "(none)")); + getLogger().info("Removed artifacts: {}", + config.getOptionalValue(CLASS_LOADING_REMOVED_ARTIFACTS, String.class).orElse("(none)")); - String parentFirstArtifactsProp = configMap.getOrDefault(CLASS_LOADING_PARENT_FIRST_ARTIFACTS, ""); + String parentFirstArtifactsProp = config.getOptionalValue(CLASS_LOADING_PARENT_FIRST_ARTIFACTS, String.class) + .orElse(""); Set parentFirstArtifacts = dependenciesListToArtifactKeySet(parentFirstArtifactsProp); - getLogger().info("parent first artifacts: {}", 
configMap.getOrDefault(CLASS_LOADING_PARENT_FIRST_ARTIFACTS, "(none)")); + getLogger().info("parent first artifacts: {}", + config.getOptionalValue(CLASS_LOADING_PARENT_FIRST_ARTIFACTS, String.class).orElse("(none)")); - String optionalDependenciesProp = configMap.getOrDefault(INCLUDED_OPTIONAL_DEPENDENCIES, ""); - boolean filterOptionalDependencies = Boolean - .parseBoolean(configMap.getOrDefault(FILTER_OPTIONAL_DEPENDENCIES, "false")); + String optionalDependenciesProp = config.getOptionalValue(INCLUDED_OPTIONAL_DEPENDENCIES, String.class).orElse(""); + boolean filterOptionalDependencies = config.getOptionalValue(FILTER_OPTIONAL_DEPENDENCIES, Boolean.class).orElse(false); Set optionalDependencies = filterOptionalDependencies ? dependenciesListToArtifactKeySet(optionalDependenciesProp) : Collections.emptySet(); diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusBuildTask.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusBuildTask.java index 88196dbba173aa..827ace86ae8458 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusBuildTask.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusBuildTask.java @@ -29,6 +29,7 @@ import io.quarkus.gradle.tasks.worker.BuildWorker; import io.quarkus.maven.dependency.GACTV; import io.smallrye.config.Expressions; +import io.smallrye.config.SmallRyeConfig; /** * Base class for the {@link QuarkusBuildDependencies}, {@link QuarkusBuildCacheableAppParts}, {@link QuarkusBuild} tasks @@ -207,30 +208,29 @@ void generateBuild() { }); ApplicationModel appModel = resolveAppModelForBuild(); - Map configMap = new HashMap<>(); - EffectiveConfig effectiveConfig = extension().buildEffectiveConfiguration(appModel.getAppArtifact()); - Expressions.withoutExpansion(() -> { - for (Map.Entry entry : effectiveConfig.configMap().entrySet()) { - if 
(entry.getKey().startsWith("quarkus.")) { - configMap.put(entry.getKey(), effectiveConfig.config().getRawValue(entry.getKey())); - } - } + SmallRyeConfig config = extension().buildEffectiveConfiguration(appModel.getAppArtifact()).getConfig(); + Map quarkusProperties = Expressions.withoutExpansion(() -> { + Map values = new HashMap<>(); + config.getValues("quarkus", String.class, String.class) + .forEach((key, value) -> values.put("quarkus." + key, value)); + return values; }); getLogger().info("Starting Quarkus application build for package type {}", packageType); if (getLogger().isEnabled(LogLevel.INFO)) { getLogger().info("Effective properties: {}", - configMap.entrySet().stream() + quarkusProperties.entrySet().stream() .map(Object::toString) .sorted() .collect(Collectors.joining("\n ", "\n ", ""))); } - WorkQueue workQueue = workQueue(configMap, () -> extension().buildForkOptions); + WorkQueue workQueue = workQueue(quarkusProperties, () -> extension().buildForkOptions); workQueue.submit(BuildWorker.class, params -> { - params.getBuildSystemProperties().putAll(extension().buildSystemProperties(appModel.getAppArtifact())); + params.getBuildSystemProperties() + .putAll(extension().buildSystemProperties(appModel.getAppArtifact(), quarkusProperties)); params.getBaseName().set(extension().finalName()); params.getTargetDirectory().set(buildDir.toFile()); params.getAppModel().set(appModel); diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusGenerateCode.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusGenerateCode.java index b01ffdb6732f05..1b1cc049c8bd54 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusGenerateCode.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusGenerateCode.java @@ -102,17 +102,17 @@ public Set getInputDirectory() { @TaskAction public void generateCode() { 
ApplicationModel appModel = extension().getApplicationModel(launchMode); - Map configMap = extension().buildEffectiveConfiguration(appModel.getAppArtifact()).configMap(); + Map values = extension().buildEffectiveConfiguration(appModel.getAppArtifact()).getValues(); File outputPath = getGeneratedOutputDirectory().get().getAsFile(); getLogger().debug("Will trigger preparing sources for source directories: {} buildDir: {}", sourcesDirectories, buildDir.getAbsolutePath()); - WorkQueue workQueue = workQueue(configMap, () -> extension().codeGenForkOptions); + WorkQueue workQueue = workQueue(values, () -> extension().codeGenForkOptions); workQueue.submit(CodeGenWorker.class, params -> { - params.getBuildSystemProperties().putAll(configMap); + params.getBuildSystemProperties().putAll(values); params.getBaseName().set(extension().finalName()); params.getTargetDirectory().set(buildDir); params.getAppModel().set(appModel); diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusRun.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusRun.java index ea37bcb6875d2d..5935f9d46caea7 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusRun.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusRun.java @@ -100,9 +100,8 @@ public void setJvmArgs(List jvmArgs) { @TaskAction public void runQuarkus() { ApplicationModel appModel = resolveAppModelForBuild(); - Map configMap = extension().buildEffectiveConfiguration(appModel.getAppArtifact()).configMap(); Properties sysProps = new Properties(); - sysProps.putAll(configMap); + sysProps.putAll(extension().buildEffectiveConfiguration(appModel.getAppArtifact()).getValues()); try (CuratedApplication curatedApplication = QuarkusBootstrap.builder() .setBaseClassLoader(getClass().getClassLoader()) .setExistingModel(appModel) diff --git 
a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusShowEffectiveConfig.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusShowEffectiveConfig.java index cd37b8b1aad86f..2115ee6ee6ad64 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusShowEffectiveConfig.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusShowEffectiveConfig.java @@ -9,7 +9,6 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.List; -import java.util.Map; import java.util.Properties; import java.util.stream.Collectors; @@ -22,6 +21,7 @@ import org.gradle.api.tasks.options.Option; import io.quarkus.gradle.QuarkusPlugin; +import io.smallrye.config.SmallRyeConfig; /** * Just show the effective configuration and settings. @@ -45,21 +45,21 @@ public Property getSaveConfigProperties() { @TaskAction public void dumpEffectiveConfiguration() { try { - EffectiveConfig effective = extension() + EffectiveConfig effectiveConfig = extension() .buildEffectiveConfiguration(extension().getApplicationModel().getAppArtifact()); - Map configMap = effective.configMap(); + SmallRyeConfig config = effectiveConfig.getConfig(); List sourceNames = new ArrayList<>(); - effective.config().getConfigSources().forEach(configSource -> sourceNames.add(configSource.getName())); + config.getConfigSources().forEach(configSource -> sourceNames.add(configSource.getName())); - String config = configMap.entrySet().stream() - .filter(e -> e.getKey().startsWith("quarkus.")) - .map(e -> format("%s=%s", e.getKey(), e.getValue())).sorted() + String quarkusConfig = config.getValues("quarkus", String.class, String.class) + .entrySet() + .stream() + .map(e -> format("quarkus.%s=%s", e.getKey(), e.getValue())).sorted() .collect(Collectors.joining("\n ", "\n ", "\n")); - - getLogger().lifecycle("Effective Quarkus configuration options: {}", config); + 
getLogger().lifecycle("Effective Quarkus configuration options: {}", quarkusConfig); String finalName = extension().finalName(); - String packageType = configMap.getOrDefault(QuarkusPlugin.QUARKUS_PACKAGE_TYPE, "fast-jar"); + String packageType = config.getOptionalValue(QuarkusPlugin.QUARKUS_PACKAGE_TYPE, String.class).orElse("fast-jar"); File fastJar = fastJar(); getLogger().lifecycle( "Quarkus package type: {}\n" + @@ -79,7 +79,7 @@ public void dumpEffectiveConfiguration() { if (getSaveConfigProperties().get()) { Properties props = new Properties(); - props.putAll(configMap); + props.putAll(effectiveConfig.getValues()); Path file = buildDir.toPath().resolve(finalName + ".quarkus-build.properties"); try (BufferedWriter writer = newBufferedWriter(file)) { props.store(writer, format("Quarkus build properties with package type %s", packageType)); diff --git a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusTask.java b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusTask.java index 3294472909fb14..e8dade749dc72e 100644 --- a/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusTask.java +++ b/devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/QuarkusTask.java @@ -61,9 +61,14 @@ WorkQueue workQueue(Map configMap, Supplier configMap, List> customizations) { JavaForkOptions forkOptions = processWorkerSpec.getForkOptions(); - customizations.forEach(a -> a.execute(forkOptions)); + // Propagate user.dir to load config sources that use it (instead of the worker user.dir) + String userDir = configMap.get("user.dir"); + if (userDir != null) { + forkOptions.systemProperty("user.dir", userDir); + } + String quarkusWorkerMaxHeap = System.getProperty("quarkus.gradle-worker.max-heap"); if (quarkusWorkerMaxHeap != null && forkOptions.getAllJvmArgs().stream().noneMatch(arg -> arg.startsWith("-Xmx"))) { forkOptions.jvmArgs("-Xmx" + 
quarkusWorkerMaxHeap); diff --git a/devtools/gradle/gradle-application-plugin/src/test/java/io/quarkus/gradle/tasks/CryptoConfigTest.java b/devtools/gradle/gradle-application-plugin/src/test/java/io/quarkus/gradle/tasks/CryptoConfigTest.java new file mode 100644 index 00000000000000..302551934e8db1 --- /dev/null +++ b/devtools/gradle/gradle-application-plugin/src/test/java/io/quarkus/gradle/tasks/CryptoConfigTest.java @@ -0,0 +1,33 @@ +package io.quarkus.gradle.tasks; + +import java.io.File; +import java.net.URL; +import java.nio.file.Path; + +import org.apache.commons.io.FileUtils; +import org.gradle.testkit.runner.GradleRunner; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +public class CryptoConfigTest { + + @TempDir + Path testProjectDir; + + @Test + @Disabled("To be fixed via https://github.com/quarkusio/quarkus/issues/38007") + void smallryeCrypto() throws Exception { + URL url = getClass().getClassLoader().getResource("io/quarkus/gradle/tasks/crypto/main"); + FileUtils.copyDirectory(new File(url.toURI()), testProjectDir.toFile()); + FileUtils.copyFile(new File("../gradle.properties"), testProjectDir.resolve("gradle.properties").toFile()); + + GradleRunner.create() + .withPluginClasspath() + .withProjectDir(testProjectDir.toFile()) + .withArguments("build", "--info", "--stacktrace", "--build-cache", "--configuration-cache") + // .build() checks whether the build failed, which is good enough for this test + .build(); + + } +} diff --git a/devtools/gradle/gradle-application-plugin/src/test/java/io/quarkus/gradle/tasks/EffectiveConfigTest.java b/devtools/gradle/gradle-application-plugin/src/test/java/io/quarkus/gradle/tasks/EffectiveConfigTest.java index ad239fe7fc39f3..18e6b53f403219 100644 --- a/devtools/gradle/gradle-application-plugin/src/test/java/io/quarkus/gradle/tasks/EffectiveConfigTest.java +++ 
b/devtools/gradle/gradle-application-plugin/src/test/java/io/quarkus/gradle/tasks/EffectiveConfigTest.java @@ -12,6 +12,7 @@ import org.assertj.core.api.SoftAssertions; import org.assertj.core.api.junit.jupiter.InjectSoftAssertions; import org.assertj.core.api.junit.jupiter.SoftAssertionsExtension; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -32,21 +33,30 @@ void empty() { // Cannot do an exact match, because `map` contains both the "raw" environment variables AND the // "property-key-ish" entries - i.e. environment appears "twice". - soft.assertThat(effectiveConfig.configMap()).containsAllEntriesOf(expect); + soft.assertThat(effectiveConfig.getValues()).containsAllEntriesOf(expect); } @Test void fromProjectProperties() { EffectiveConfig effectiveConfig = EffectiveConfig.builder().withProjectProperties(Map.of("quarkus.foo", "bar")).build(); - soft.assertThat(effectiveConfig.configMap()).containsEntry("quarkus.foo", "bar"); + soft.assertThat(effectiveConfig.getValues()).containsEntry("quarkus.foo", "bar"); } @Test void fromForcedProperties() { EffectiveConfig effectiveConfig = EffectiveConfig.builder().withTaskProperties(Map.of("quarkus.foo", "bar")).build(); - soft.assertThat(effectiveConfig.configMap()).containsEntry("quarkus.foo", "bar"); + soft.assertThat(effectiveConfig.getValues()).containsEntry("quarkus.foo", "bar"); + } + + @Test + @Disabled("To be fixed via https://github.com/quarkusio/quarkus/issues/38007") + void crypto() { + EffectiveConfig effectiveConfig = EffectiveConfig.builder() + .withTaskProperties(Map.of("quarkus.foo", "${aes-gcm-nopadding::superSecret}")).build(); + + soft.assertThat(effectiveConfig.getValues()).containsEntry("quarkus.foo", "superSecret"); } @Test @@ -59,13 +69,13 @@ void appPropsOverload() throws Exception { EffectiveConfig effectiveConfig = EffectiveConfig.builder().withSourceDirectories(source).build(); - SmallRyeConfig config = 
effectiveConfig.config(); + SmallRyeConfig config = effectiveConfig.getConfig(); List sourceNames = new ArrayList<>(); config.getConfigSources().forEach(configSource -> sourceNames.add(configSource.getName())); soft.assertThat(sourceNames).anyMatch(s -> s.contains(url1.getPath())); soft.assertThat(sourceNames).anyMatch(s -> s.contains(url2.getPath())); // The YAML source is always higher in ordinal than the properties source - soft.assertThat(effectiveConfig.configMap()).containsEntry("quarkus.prop.overload", "from-yaml"); + soft.assertThat(effectiveConfig.getValues()).containsEntry("quarkus.prop.overload", "from-yaml"); } @Test @@ -80,14 +90,14 @@ void appPropsOverloadWrongProfile() throws Exception { EffectiveConfig effectiveConfig = EffectiveConfig.builder().withSourceDirectories(source).build(); - SmallRyeConfig config = effectiveConfig.config(); + SmallRyeConfig config = effectiveConfig.getConfig(); List sourceNames = new ArrayList<>(); config.getConfigSources().forEach(configSource -> sourceNames.add(configSource.getName())); soft.assertThat(sourceNames).anyMatch(s -> s.contains(url1.getPath())); soft.assertThat(sourceNames).anyMatch(s -> s.contains(url2.getPath())); soft.assertThat(sourceNames).anyMatch(s -> s.contains(url3.getPath())); // The YAML source is always higher in ordinal than the properties source - soft.assertThat(effectiveConfig.configMap()).containsEntry("quarkus.prop.overload", "from-yaml"); + soft.assertThat(effectiveConfig.getValues()).containsEntry("quarkus.prop.overload", "from-yaml"); } @Test @@ -106,7 +116,7 @@ void appPropsOverloadProdProfile() throws Exception { EffectiveConfig effectiveConfig = EffectiveConfig.builder().withSourceDirectories(source).build(); - SmallRyeConfig config = effectiveConfig.config(); + SmallRyeConfig config = effectiveConfig.getConfig(); List sourceNames = new ArrayList<>(); config.getConfigSources().forEach(configSource -> sourceNames.add(configSource.getName())); soft.assertThat(sourceNames).anyMatch(s -> 
s.contains(url1.getPath())); @@ -115,6 +125,6 @@ void appPropsOverloadProdProfile() throws Exception { soft.assertThat(sourceNames).anyMatch(s -> s.contains(url4.getPath())); soft.assertThat(sourceNames).anyMatch(s -> s.contains(url5.getPath())); // The YAML source is always higher in ordinal than the properties source, even for profile property names - soft.assertThat(effectiveConfig.configMap()).containsEntry("quarkus.prop.overload", "from-yaml-prod"); + soft.assertThat(effectiveConfig.getValues()).containsEntry("quarkus.prop.overload", "from-yaml-prod"); } } diff --git a/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/crypto/main/build.gradle.kts b/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/crypto/main/build.gradle.kts new file mode 100644 index 00000000000000..c74f8645525148 --- /dev/null +++ b/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/crypto/main/build.gradle.kts @@ -0,0 +1,21 @@ +plugins { + java + id("io.quarkus") +} + +buildscript { + repositories { + mavenLocal() + mavenCentral() + } +} + +repositories { + mavenLocal() + mavenCentral() +} + +dependencies { + implementation(enforcedPlatform("io.quarkus:quarkus-bom:${project.property("version")}")) + implementation("jakarta.inject:jakarta.inject-api:2.0.1") +} diff --git a/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/crypto/main/settings.gradle.kts b/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/crypto/main/settings.gradle.kts new file mode 100644 index 00000000000000..738e4e47476c93 --- /dev/null +++ b/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/crypto/main/settings.gradle.kts @@ -0,0 +1 @@ +rootProject.name = "gradle-build-caching" diff --git 
a/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/crypto/main/src/main/java/org/acme/Foo.java b/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/crypto/main/src/main/java/org/acme/Foo.java new file mode 100644 index 00000000000000..8f4e8542598f49 --- /dev/null +++ b/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/crypto/main/src/main/java/org/acme/Foo.java @@ -0,0 +1,4 @@ +package org.acme; + +public class Foo { +} \ No newline at end of file diff --git a/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/crypto/main/src/main/resources/application.properties b/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/crypto/main/src/main/resources/application.properties new file mode 100644 index 00000000000000..9a04fd5f9b2824 --- /dev/null +++ b/devtools/gradle/gradle-application-plugin/src/test/resources/io/quarkus/gradle/tasks/crypto/main/src/main/resources/application.properties @@ -0,0 +1 @@ +someValue = ${aes-gcm-nopadding::superSecret} diff --git a/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/QuarkusExtensionPlugin.java b/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/QuarkusExtensionPlugin.java index 10b44971e697c5..6f8cdb1146038e 100644 --- a/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/QuarkusExtensionPlugin.java +++ b/devtools/gradle/gradle-extension-plugin/src/main/java/io/quarkus/extension/gradle/QuarkusExtensionPlugin.java @@ -56,11 +56,13 @@ private void registerTasks(Project project, QuarkusExtensionConfiguration quarku Configuration runtimeModuleClasspath = project.getConfigurations() .getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME); + TaskProvider validateExtensionTask = tasks.register(VALIDATE_EXTENSION_TASK_NAME, + ValidateExtensionTask.class, quarkusExt, 
runtimeModuleClasspath); + TaskProvider extensionDescriptorTask = tasks.register(EXTENSION_DESCRIPTOR_TASK_NAME, ExtensionDescriptorTask.class, quarkusExt, mainSourceSet, runtimeModuleClasspath); - TaskProvider validateExtensionTask = tasks.register(VALIDATE_EXTENSION_TASK_NAME, - ValidateExtensionTask.class, quarkusExt, runtimeModuleClasspath); + extensionDescriptorTask.configure(task -> task.dependsOn(validateExtensionTask)); project.getPlugins().withType( JavaPlugin.class, diff --git a/devtools/gradle/gradle-model/build.gradle.kts b/devtools/gradle/gradle-model/build.gradle.kts index be5055edd3d8c4..da71708ccb97c8 100644 --- a/devtools/gradle/gradle-model/build.gradle.kts +++ b/devtools/gradle/gradle-model/build.gradle.kts @@ -4,6 +4,7 @@ plugins { dependencies { compileOnly(libs.kotlin.gradle.plugin.api) + gradleApi() } group = "io.quarkus" diff --git a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/dependency/ConditionalDependenciesEnabler.java b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/dependency/ConditionalDependenciesEnabler.java index 4d00b1055fcddb..dd34c24d8d6b1f 100644 --- a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/dependency/ConditionalDependenciesEnabler.java +++ b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/dependency/ConditionalDependenciesEnabler.java @@ -102,12 +102,6 @@ private void collectConditionalDependencies(Set runtimeArtifac queueConditionalDependency(extension, conditionalDep); } } - - // If the extension doesn't have any conditions we just enable it by default - if (extension.getDependencyConditions().isEmpty()) { - extension.setConditional(true); - enableConditionalDependency(extension.getExtensionId()); - } } } } diff --git a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/ToolingUtils.java b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/ToolingUtils.java index d2a945aa69ef13..861f1ba4c889f4 100644 --- 
a/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/ToolingUtils.java +++ b/devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/ToolingUtils.java @@ -108,10 +108,16 @@ public static Project findIncludedProject(Project project, ExternalModuleDepende } } - final Gradle parentGradle = project.getRootProject().getGradle().getParent(); - if (parentGradle != null) { - return findIncludedProject(parentGradle.getRootProject(), dependency); - } else { + try { + final Gradle parentGradle = project.getRootProject().getGradle().getParent(); + if (parentGradle != null) { + return findIncludedProject(parentGradle.getRootProject(), dependency); + } else { + return null; + } + } catch (IllegalStateException ise) { + // This can happen if the project itself is in an included build, which means that the root-project + // is not yet known, so `DefaultGradle.getRootProject()` throws an ISE. return null; } } @@ -134,9 +140,15 @@ private static Project findIncludedBuildProject(IncludedBuild ib, ExternalModule } final DefaultIncludedBuild.IncludedBuildImpl dib = (DefaultIncludedBuild.IncludedBuildImpl) ib; - final Project rootProject = dib.getTarget().getMutableModel().getRootProject(); + try { + final Project rootProject = dib.getTarget().getMutableModel().getRootProject(); - return findLocalProject(rootProject, dependency); + return findLocalProject(rootProject, dependency); + } catch (IllegalStateException ise) { + // This can happen if the project itself is in an included build, which means that the root-project + // is not yet known, so `DefaultGradle.getRootProject()` throws an ISE. 
+ return null; + } } public static Path serializeAppModel(ApplicationModel appModel, Task context, boolean test) throws IOException { diff --git a/devtools/gradle/gradle/libs.versions.toml b/devtools/gradle/gradle/libs.versions.toml index 6acd812b2000f8..b20bfe5d24561b 100644 --- a/devtools/gradle/gradle/libs.versions.toml +++ b/devtools/gradle/gradle/libs.versions.toml @@ -3,7 +3,7 @@ plugin-publish = "1.2.1" # updating Kotlin here makes QuarkusPluginTest > shouldNotFailOnProjectDependenciesWithoutMain(Path) fail kotlin = "1.9.22" -smallrye-config = "3.5.2" +smallrye-config = "3.6.0" junit5 = "5.10.2" assertj = "3.25.3" @@ -22,8 +22,8 @@ quarkus-project-core-extension-codestarts = { module = "io.quarkus:quarkus-proje kotlin-gradle-plugin-api = { module = "org.jetbrains.kotlin:kotlin-gradle-plugin-api", version.ref = "kotlin" } smallrye-config-yaml = { module = "io.smallrye.config:smallrye-config-source-yaml", version.ref = "smallrye-config" } -jackson-databind = {module="com.fasterxml.jackson.core:jackson-databind"} -jackson-dataformat-yaml = {module="com.fasterxml.jackson.dataformat:jackson-dataformat-yaml"} +jackson-databind = { module = "com.fasterxml.jackson.core:jackson-databind" } +jackson-dataformat-yaml = { module = "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml" } junit-bom = { module = "org.junit:junit-bom", version.ref = "junit5" } junit-api = { module = "org.junit.jupiter:junit-jupiter-api" } diff --git a/devtools/maven/src/main/java/io/quarkus/maven/CreateProjectMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/CreateProjectMojo.java index 3025394fba8f48..e663027c9ecd8d 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/CreateProjectMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/CreateProjectMojo.java @@ -276,10 +276,10 @@ public void execute() throws MojoExecutionException { } askTheUserForMissingValues(); - if (projectArtifactId != DEFAULT_ARTIFACT_ID && !OK_ID.matcher(projectArtifactId).matches()) { + if 
(!DEFAULT_ARTIFACT_ID.equals(projectArtifactId) && !OK_ID.matcher(projectArtifactId).matches()) { throw new MojoExecutionException(String.format(BAD_IDENTIFIER, "artifactId", projectArtifactId)); } - if (projectGroupId != DEFAULT_GROUP_ID && !OK_ID.matcher(projectGroupId).matches()) { + if (!DEFAULT_GROUP_ID.equals(projectGroupId) && !OK_ID.matcher(projectGroupId).matches()) { throw new MojoExecutionException(String.format(BAD_IDENTIFIER, "groupId", projectGroupId)); } @@ -389,16 +389,7 @@ private void askTheUserForMissingValues() throws MojoExecutionException { // If the user has disabled the interactive mode or if the user has specified the artifactId, disable the // user interactions. if (!session.getRequest().isInteractiveMode() || shouldUseDefaults()) { - if (isBlank(projectArtifactId)) { - // we need to set it for the project directory - projectArtifactId = DEFAULT_ARTIFACT_ID; - } - if (isBlank(projectGroupId)) { - projectGroupId = DEFAULT_GROUP_ID; - } - if (isBlank(projectVersion)) { - projectVersion = DEFAULT_VERSION; - } + setProperDefaults(); return; } @@ -427,12 +418,27 @@ private void askTheUserForMissingValues() throws MojoExecutionException { input -> noCode = input.startsWith("n")); prompter.collectInput(); + } else { + setProperDefaults(); } } catch (IOException e) { throw new MojoExecutionException("Unable to get user input", e); } } + private void setProperDefaults() { + if (isBlank(projectArtifactId)) { + // we need to set it for the project directory + projectArtifactId = DEFAULT_ARTIFACT_ID; + } + if (isBlank(projectGroupId)) { + projectGroupId = DEFAULT_GROUP_ID; + } + if (isBlank(projectVersion)) { + projectVersion = DEFAULT_VERSION; + } + } + private boolean shouldUseDefaults() { // Must be called before user input return projectArtifactId != null; diff --git a/devtools/maven/src/main/java/io/quarkus/maven/DevMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/DevMojo.java index fb8385f87be591..bdcd48eeff5d3c 100644 --- 
a/devtools/maven/src/main/java/io/quarkus/maven/DevMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/DevMojo.java @@ -588,10 +588,18 @@ private String handleAutoCompile() throws MojoExecutionException { continue; } for (PluginExecution e : p.getExecutions()) { + if (e.getPhase() != null && !PRE_DEV_MODE_PHASES.contains(e.getPhase())) { + // skip executions with phases post quarkus:dev, such as install, deploy, site, etc + if (getLog().isDebugEnabled()) { + getLog().debug("Skipping " + e.getId() + " of " + p.getId()); + } + continue; + } String goalPrefix = null; if (!e.getGoals().isEmpty()) { goalPrefix = getMojoDescriptor(p, e.getGoals().get(0)).getPluginDescriptor().getGoalPrefix(); pluginPrefixes.put(goalPrefix, p); + pluginPrefixes.put(p.getId(), p); } if (e.getPhase() != null) { phaseExecutions.computeIfAbsent(e.getPhase(), k -> new ArrayList<>()).add(new PluginExec(p, goalPrefix, e)); @@ -630,7 +638,7 @@ private String handleAutoCompile() throws MojoExecutionException { if (goal.endsWith(currentGoal)) { break; } - var colon = goal.indexOf(':'); + var colon = goal.lastIndexOf(':'); if (colon >= 0) { var plugin = pluginPrefixes.get(goal.substring(0, colon)); if (plugin == null) { diff --git a/devtools/maven/src/main/java/io/quarkus/maven/TrackConfigChangesMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/TrackConfigChangesMojo.java index 13f43790799818..b023983db78b21 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/TrackConfigChangesMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/TrackConfigChangesMojo.java @@ -1,12 +1,19 @@ package io.quarkus.maven; import java.io.BufferedReader; +import java.io.BufferedWriter; import java.io.File; import java.io.IOException; import java.lang.reflect.Method; +import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; import java.util.Properties; +import 
java.util.zip.Adler32; +import java.util.zip.Checksum; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; @@ -18,6 +25,7 @@ import io.quarkus.bootstrap.app.CuratedApplication; import io.quarkus.bootstrap.classloading.QuarkusClassLoader; import io.quarkus.bootstrap.model.ApplicationModel; +import io.quarkus.maven.dependency.DependencyFlags; import io.quarkus.runtime.LaunchMode; /** @@ -58,6 +66,18 @@ public class TrackConfigChangesMojo extends QuarkusBootstrapMojo { @Parameter(defaultValue = "false", property = "quarkus.track-config-changes.dump-current-when-recorded-unavailable") boolean dumpCurrentWhenRecordedUnavailable; + /** + * Whether to dump Quarkus application dependencies along with their checksums + */ + @Parameter(defaultValue = "true", property = "quarkus.track-config-changes.dump-dependencies") + boolean dumpDependencies; + + /** + * Dependency dump file + */ + @Parameter(property = "quarkus.track-config-changes.dependencies-file") + File dependenciesFile; + @Override protected boolean beforeExecute() throws MojoExecutionException, MojoFailureException { if (skip) { @@ -82,16 +102,6 @@ protected void doExecute() throws MojoExecutionException, MojoFailureException { getLog().debug("Bootstrapping Quarkus application in mode " + launchMode); } - Path targetFile; - if (outputFile == null) { - targetFile = outputDirectory.toPath() - .resolve("quarkus-" + launchMode.getDefaultProfile() + "-config-check"); - } else if (outputFile.isAbsolute()) { - targetFile = outputFile.toPath(); - } else { - targetFile = outputDirectory.toPath().resolve(outputFile.toPath()); - } - Path compareFile; if (this.recordedBuildConfigFile == null) { compareFile = recordedBuildConfigDirectory.toPath() @@ -102,34 +112,64 @@ protected void doExecute() throws MojoExecutionException, MojoFailureException { compareFile = recordedBuildConfigDirectory.toPath().resolve(this.recordedBuildConfigFile.toPath()); } - final Properties 
compareProps = new Properties(); - if (Files.exists(compareFile)) { - try (BufferedReader reader = Files.newBufferedReader(compareFile)) { - compareProps.load(reader); - } catch (IOException e) { - throw new RuntimeException("Failed to read " + compareFile, e); - } - } else if (!dumpCurrentWhenRecordedUnavailable) { - getLog().info(compareFile + " not found"); + final boolean prevConfigExists = Files.exists(compareFile); + if (!prevConfigExists && !dumpCurrentWhenRecordedUnavailable && !dumpDependencies) { + getLog().info("Config dump from the previous build does not exist at " + compareFile); return; } CuratedApplication curatedApplication = null; QuarkusClassLoader deploymentClassLoader = null; final ClassLoader originalCl = Thread.currentThread().getContextClassLoader(); - Properties actualProps; final boolean clearPackageTypeSystemProperty = setPackageTypeSystemPropertyIfNativeProfileEnabled(); try { curatedApplication = bootstrapApplication(launchMode); - deploymentClassLoader = curatedApplication.createDeploymentClassLoader(); - Thread.currentThread().setContextClassLoader(deploymentClassLoader); - - final Class codeGenerator = deploymentClassLoader.loadClass("io.quarkus.deployment.CodeGenerator"); - final Method dumpConfig = codeGenerator.getMethod("dumpCurrentConfigValues", ApplicationModel.class, String.class, - Properties.class, QuarkusClassLoader.class, Properties.class, Path.class); - dumpConfig.invoke(null, curatedApplication.getApplicationModel(), - launchMode.name(), getBuildSystemProperties(true), - deploymentClassLoader, compareProps, targetFile); + if (prevConfigExists || dumpCurrentWhenRecordedUnavailable) { + final Path targetFile = getOutputFile(outputFile, launchMode.getDefaultProfile(), "-config-check"); + Properties compareProps = new Properties(); + if (prevConfigExists) { + try (BufferedReader reader = Files.newBufferedReader(compareFile)) { + compareProps.load(reader); + } catch (IOException e) { + throw new RuntimeException("Failed to 
read " + compareFile, e); + } + } + + deploymentClassLoader = curatedApplication.createDeploymentClassLoader(); + Thread.currentThread().setContextClassLoader(deploymentClassLoader); + + final Class codeGenerator = deploymentClassLoader.loadClass("io.quarkus.deployment.CodeGenerator"); + final Method dumpConfig = codeGenerator.getMethod("dumpCurrentConfigValues", ApplicationModel.class, + String.class, + Properties.class, QuarkusClassLoader.class, Properties.class, Path.class); + dumpConfig.invoke(null, curatedApplication.getApplicationModel(), + launchMode.name(), getBuildSystemProperties(true), + deploymentClassLoader, compareProps, targetFile); + } + + if (dumpDependencies) { + final List deps = new ArrayList<>(); + for (var d : curatedApplication.getApplicationModel().getDependencies(DependencyFlags.DEPLOYMENT_CP)) { + StringBuilder entry = new StringBuilder(d.toGACTVString()); + if (d.isSnapshot()) { + var adler32 = new Adler32(); + updateChecksum(adler32, d.getResolvedPaths()); + entry.append(" ").append(adler32.getValue()); + } + + deps.add(entry.toString()); + } + Collections.sort(deps); + final Path targetFile = getOutputFile(dependenciesFile, launchMode.getDefaultProfile(), + "-dependency-checksums.txt"); + Files.createDirectories(targetFile.getParent()); + try (BufferedWriter writer = Files.newBufferedWriter(targetFile)) { + for (var s : deps) { + writer.write(s); + writer.newLine(); + } + } + } } catch (Exception any) { throw new MojoExecutionException("Failed to bootstrap Quarkus application", any); } finally { @@ -142,4 +182,44 @@ protected void doExecute() throws MojoExecutionException, MojoFailureException { } } } + + private Path getOutputFile(File outputFile, String profile, String fileNameSuffix) { + if (outputFile == null) { + return outputDirectory.toPath().resolve("quarkus-" + profile + fileNameSuffix); + } + if (outputFile.isAbsolute()) { + return outputFile.toPath(); + } + return outputDirectory.toPath().resolve(outputFile.toPath()); + } + + 
private static void updateChecksum(Checksum checksum, Iterable pc) throws IOException { + for (var path : sort(pc)) { + if (Files.isDirectory(path)) { + try (DirectoryStream stream = Files.newDirectoryStream(path)) { + updateChecksum(checksum, stream); + } + } else { + checksum.update(Files.readAllBytes(path)); + } + } + } + + private static Iterable sort(Iterable original) { + var i = original.iterator(); + if (!i.hasNext()) { + return List.of(); + } + var o = i.next(); + if (!i.hasNext()) { + return List.of(o); + } + final List sorted = new ArrayList<>(); + sorted.add(o); + while (i.hasNext()) { + sorted.add(i.next()); + } + Collections.sort(sorted); + return sorted; + } } diff --git a/docs/pom.xml b/docs/pom.xml index b64f175513abd4..a3b12150163da1 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -27,7 +27,7 @@ 1.5.0-beta.8 WARN 2.26.0.Final - 21 + 28 11.1.0 6.8.0.202311291450-r @@ -2907,6 +2907,19 @@
+ + io.quarkus + quarkus-websockets-next-deployment + ${project.version} + pom + test + + + * + * + + + @@ -2922,6 +2935,7 @@ copy-resources + ${skipDocs} ${project.basedir}/target/asciidoc/sources @@ -3111,6 +3125,7 @@ exec + ${skipDocs} java -classpath @@ -3213,6 +3228,7 @@ single + ${skipDocs} assembly.xml @@ -3243,31 +3259,51 @@ - - - - - com.gradle - gradle-enterprise-maven-extension - - - - - - vale.dir - git.dir - - - - - - - - - + + unbind-skip-docs + + + skipDocs + + + + + + org.apache.maven.plugins + maven-source-plugin + + + attach-sources + none + + + + + org.apache.maven.plugins + maven-jar-plugin + + + default-jar + none + + + + + org.apache.maven.plugins + maven-install-plugin + + + default-install + none + + + + + + documentation-pdf @@ -3322,6 +3358,7 @@ single + ${skipDocs} assembly-pdf.xml diff --git a/docs/src/main/asciidoc/amqp-reference.adoc b/docs/src/main/asciidoc/amqp-reference.adoc index 85355b4b98e0dd..10cdff6374bae8 100644 --- a/docs/src/main/asciidoc/amqp-reference.adoc +++ b/docs/src/main/asciidoc/amqp-reference.adoc @@ -448,7 +448,7 @@ public AmqpClientOptions getNamedOptions() { .setPemKeyCertOptions(keycert) .setPemTrustOptions(trust) .addEnabledSaslMechanism("EXTERNAL") - .setHostnameVerificationAlgorithm("") + .setHostnameVerificationAlgorithm("") // Disables the hostname verification. Defaults is "HTTPS" .setConnectTimeout(30000) .setReconnectInterval(5000) .setContainerId("my-container"); diff --git a/docs/src/main/asciidoc/config-reference.adoc b/docs/src/main/asciidoc/config-reference.adoc index 1980740a757c39..870980d16646fd 100644 --- a/docs/src/main/asciidoc/config-reference.adoc +++ b/docs/src/main/asciidoc/config-reference.adoc @@ -164,6 +164,26 @@ quarkus.http.port=9090 <2> TIP: It works in the exact same way as Quarkus Application configuration file `application.properties`. Recommendation is to use Quarkus `application.properties`. 
+=== Locations + +Additionally to the default config locations, Quarkus provides a way to scan additional locations for configuration +properties files. + +The `quarkus.config.locations` configuration property accepts multiple locations separated by a comma `,` and each +must represent a valid `URI`. The supported `URI` schemes are: + +- file or directory (`file:`) +- classpath resource +- jar resource (`jar:`) +- http resource (`http:`) + +All loaded sources use the same ordinal of the source that found the `quarkus.config.locations` configuration +property. For instance, if `quarkus.config.locations` is set as a system property, then all loaded sources have their +ordinals set to `400` (system properties use `400` as their ordinal). The ordinal may be overridden directly for each +config source by setting the `config_ordinal` property and the ordinal value. The `config_ordinal` property only +affects the ordinal of the source in which is being set. Sources are sorted first by their ordinal, then by location +order, and finally by loading order. + === Additional Config Sources Quarkus provides additional extensions which cover other configuration formats and stores: @@ -377,7 +397,12 @@ Properties in the profile aware file have priority over profile aware properties [WARNING] ==== -The profile aware file must be present in the exact same location as the main `application.properties` file. +Do not use profile aware files to set `quarkus.profile` or `quarkus.test.profile`. This will not work because the +profile is required in advance to load the profile aware files. + +A profile aware file is only loaded if the unprofiled `application.properties` is also available in the same location +and the file extension matches between the files. This is required to keep a consistent loading order and pair all the +resources together. ==== === Parent Profile @@ -461,6 +486,12 @@ Then * `my.prop` value is 5678. * `another.prop` value is 1234. 
+[WARNING] +==== +Multiple profiles priority work in reverse order. With `quarkus.profile=common,dev`, Quarkus first checks the `dev` +profile and then the `common` profile. +==== + === Default Runtime Profile The default Quarkus runtime profile is set to the profile used to build the application: @@ -710,6 +741,11 @@ Maven projects could add the following goal to their `quarkus-maven-plugin` conf The `track-config-changes` goal looks for `${project.basedir}/.quarkus/quarkus-prod-config-dump` (file name and directory are configurable) and, if the file already exists, checks whether the values stored in the config dump have changed. It will log the changed options and save the current values of each of the options present in `${project.basedir}/.quarkus/quarkus-prod-config-dump` in `${project.basedir}/target/quarkus-prod-config.check` (the target file name and location can be configured). If the build time configuration has not changed since the last build both `${project.basedir}/.quarkus/quarkus-prod-config-dump` and `${project.basedir}/.quarkus/quarkus-prod-config-dump` will be identical. +==== Dump Quarkus application dependencies + +In addition to dumping configuration values, `track-config-changes` goal also dumps all the Quarkus application dependencies, including Quarkus build time dependencies, along with their checksums (Adler32). This file could be used to check whether Quarkus build classpath has changed since the previous run. +By default, the dependency checksums will be stored under `target/quarkus-prod-dependency-checksums.txt` file. A different location could be configured using plugin parameters. + ==== Dump current build configuration when the recorded configuration isn't found By default, `track-config-changes` looks for the configuration recorded during previous build and does nothing if it's not found. 
Enabling `dumpCurrentWhenRecordedUnavailable` parameter will make it dump the current build configuration diff --git a/docs/src/main/asciidoc/dev-services.adoc b/docs/src/main/asciidoc/dev-services.adoc index 39c49c6c55d391..d8dff1bd729e2e 100644 --- a/docs/src/main/asciidoc/dev-services.adoc +++ b/docs/src/main/asciidoc/dev-services.adoc @@ -100,6 +100,12 @@ xref:rabbitmq-dev-services.adoc[RabbitMQ Dev Services Guide]. include::{generated-dir}/config/quarkus-smallrye-reactivemessaging-rabbitmq-config-group-rabbit-mq-dev-services-build-time-config.adoc[opts=optional, leveloffset=+1] +== Pulsar + +The Pulsar Dev Service will be enabled when the `quarkus-smallrye-reactive-messaging-pulsar` extension is present in your application, and +the broker address has not been explicitly configured. More information can be found in the +xref:pulsar-dev-services.adoc[Pulsar Dev Services Guide]. + == Redis The Redis Dev Service will be enabled when the `quarkus-redis-client` extension is present in your application, and diff --git a/docs/src/main/asciidoc/dev-ui.adoc b/docs/src/main/asciidoc/dev-ui.adoc index 42454fba55d291..b7b2d52f966a84 100644 --- a/docs/src/main/asciidoc/dev-ui.adoc +++ b/docs/src/main/asciidoc/dev-ui.adoc @@ -684,7 +684,7 @@ image::dev-ui-qui-code-block-v2.png[alt=Dev UI Code Block,role="center"] [source,javascript] ---- -import 'qui-code-block'; +import '@quarkus-webcomponents/codeblock'; ---- [source,html] diff --git a/docs/src/main/asciidoc/extension-metadata.adoc b/docs/src/main/asciidoc/extension-metadata.adoc index 4ee43cc70da034..ba9f6231b18ba8 100644 --- a/docs/src/main/asciidoc/extension-metadata.adoc +++ b/docs/src/main/asciidoc/extension-metadata.adoc @@ -102,8 +102,7 @@ metadata: description: "A Jakarta REST implementation utilizing build time processing and Vert.x.\ \ This extension is not compatible with the quarkus-resteasy extension, or any of\ \ the extensions that depend on it." 
<4> -scm: - url: "https://github.com/quarkusio/quarkus" <5> +scm-url: "https://github.com/quarkusio/quarkus" <5> sponsor: A Sponsoring Organisation <6> ---- @@ -111,7 +110,7 @@ sponsor: A Sponsoring Organisation <6> <2> https://quarkus.io/guides/capabilities[Capabilities] this extension provides <3> Direct dependencies on other extensions <4> Description that can be displayed to users. In this case, the description was copied from the `pom.xml` of the extension module but it could also be provided in the template file. -<5> The source code repository of this extension. Optional, and will often be set automatically. In GitHub Actions builds, it will be inferred from the CI environment. For other GitHub repositories, it can be controlled by setting a `GITHUB_REPOSITORY` environment variable. +<5> The source code repository of this extension. Optional, and will often be set automatically using the `` information in the pom. In GitHub Actions builds, it will be inferred from the CI environment. For other GitHub repositories, it can be controlled by setting a `GITHUB_REPOSITORY` environment variable. <6> The sponsor(s) of this extension. Optional, and will sometimes be determined automatically from commit history. [[quarkus-extension-properties]] diff --git a/docs/src/main/asciidoc/getting-started-testing.adoc b/docs/src/main/asciidoc/getting-started-testing.adoc index 63ef64af6eeb1c..76c6c9c149937b 100644 --- a/docs/src/main/asciidoc/getting-started-testing.adoc +++ b/docs/src/main/asciidoc/getting-started-testing.adoc @@ -843,7 +843,20 @@ So if you need to call methods such as `verify` you should hang on to the mock i ==== Further simplification with `@InjectMock` Building on the features provided by `QuarkusMock`, Quarkus also allows users to effortlessly take advantage of link:https://site.mockito.org/[Mockito] for mocking the beans supported by `QuarkusMock`. 
-This functionality is available with the `@io.quarkus.test.InjectMock` annotation if the `quarkus-junit5-mockito` dependency is present. + +[IMPORTANT] +==== +This functionality is available with the `@io.quarkus.test.InjectMock` annotation **only if** the `quarkus-junit5-mockito` dependency is present: +[source,xml] +---- + + io.quarkus + quarkus-junit5-mockito + test + +---- + +==== Using `@InjectMock`, the previous example could be written as follows: @@ -1666,11 +1679,14 @@ The fields annotated with `@Inject` and `@InjectMock` are injected after a test Finally, the CDI request context is activated and terminated per each test method. === Injection + Test class fields annotated with `@jakarta.inject.Inject` and `@io.quarkus.test.InjectMock` are injected after a test instance is created. Dependent beans injected into these fields are correctly destroyed before a test instance is destroyed. Parameters of a test method for which a matching bean exists are resolved unless annotated with `@io.quarkus.test.component.SkipInject`. Dependent beans injected into the test method arguments are correctly destroyed after the test method completes. +NOTE: Arguments of a `@ParameterizedTest` method that are provided by an `ArgumentsProvider`, for example with `@org.junit.jupiter.params.provider.ValueArgumentsProvider`, must be annotated with `@SkipInject`. + === Auto Mocking Unsatisfied Dependencies Unlike in regular CDI environments the test does not fail if a component injects an unsatisfied dependency. 
diff --git a/docs/src/main/asciidoc/grpc-reference.adoc b/docs/src/main/asciidoc/grpc-reference.adoc new file mode 100644 index 00000000000000..5cfd3e5748873b --- /dev/null +++ b/docs/src/main/asciidoc/grpc-reference.adoc @@ -0,0 +1,232 @@ +//// +This guide is maintained in the main Quarkus repository +and pull requests should be submitted there: +https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc +//// += gRPC reference guide +include::_attributes.adoc[] +:categories: Serialization +:diataxis-type: Reference +:summary: Learn how to configure gRPC server and clients. +:topics: grpc +:extensions: io.quarkus:quarkus-grpc + + +== Using gRPC with Quarkus + +If you need to implement a gRPC service or consume it, you need the `quarkus-grpc` extension. +It handles both sides. + +=== Using Maven + +To enable gRPC, add the following dependency to your project: + +[source,xml,subs=attributes+] +---- + + io.quarkus + quarkus-grpc + +---- + +Next, ensure that the `generate-code` phase is enabled in the Quarkus Maven plugin: + +[source,xml,subs=attributes+] +---- + + ${quarkus.platform.group-id} + quarkus-maven-plugin + ${quarkus.platform.version} + true + + + + build + generate-code + generate-code-tests + + + + +---- + +=== Using Gradle + +For Gradle, add the following dependency to your project: + +[source,gradle,subs=attributes+] +---- +implementation 'io.quarkus:quarkus-grpc' +---- + +== Selecting a gRPC server + +Quarkus provides two implementation of the gRPC server: gRPC Java (based on Netty) and Vert.x. +Both of them support TLS. + +One of the advantage of the Vert.x based server is the ability to use a single server to handle HTTP requests and gRPC requests. This is useful if you want to expose both REST and gRPC endpoints on the same port. This is not possible with the gRPC Java server (using a separate server). 
+ +To select the gRPC server implementation, set the `quarkus.grpc.server.use-separate-server` property in your `application.properties` file: + +[source,properties,subs=attributes+] +---- +quarkus.grpc.server.use-separate-server=false # Use the Vert.x based server +---- + +We recommend the usage of the Vert.x based gRPC server, as it is more flexible and better integrated in the Quarkus ecosystem. + +IMPORTANT: You cannot use both servers at the same time. + +== Selecting gRPC clients + +As for the server, Quarkus proposes two alternatives for the gRPC clients: gRPC Java and Vert.x. +Unlike for the server, you can select the transport for each client: + +[source,properties,subs=attributes+] +---- +quarkus.grpc.clients.hello.use-quarkus-grpc-client=true # Use client using the Vert.x based transport +---- + +While it's not the default, we recommend using the Vert.x based client, as it is more flexible and better integrated in the Quarkus ecosystem. +It does not change the stubs you can use, as they are generated by the gRPC framework. +However, it changes the way the client communicates with the server. + +== Configuring TLS for gRPC services + +=== With the Vert.x based server + +If you use the Vert.x based server, you can configure TLS by setting the following properties in your `application.properties` file: + +[source,properties,subs=attributes+] +---- +quarkus.grpc.server.use-separate-server=false + +quarkus.grpc.server.plain-text=false +quarkus.http.ssl.certificate.key-store-file=target/certs/grpc-tls-keystore.p12 +quarkus.http.ssl.certificate.key-store-password=***** +quarkus.http.insecure-requests=disabled +---- + +You can use `key-store-file` and `key-store-password` to configure the keystore file and its password when using JKS or P12. 
For PEM, use the `certificate` and `key` properties: + +[source,properties,subs=attributes+] +---- +quarkus.grpc.server.use-separate-server=false + +quarkus.grpc.server.plain-text=false +quarkus.http.ssl.certificate.files=target/certs/grpc-tls.crt +quarkus.http.ssl.certificate.key-files=target/certs/grpc-tls.key +quarkus.http.insecure-requests=disabled +---- + +NOTE: The `quarkus.http.insecure-requests` property is used to disable insecure requests. + +NOTE: When TLS is enabled, it covers both HTTP and gRPC traffic. + +=== With the gRPC Java server + +If you use the gRPC Java server, you can configure TLS by setting the following properties in your `application.properties` file: + +[source,properties,subs=attributes+] +---- +quarkus.grpc.server.ssl.certificate=tls/server.pem +quarkus.grpc.server.ssl.key=tls/server.key + +quarkus.grpc.server.plain-text=false +---- + +This server only supports `PEM` format for the certificate and the key. + +== Configuring TLS for gRPC clients + +When using the Vert.x based client, you can configure TLS by setting the following properties in your `application.properties` file: + +[source,properties,subs=attributes+] +---- +quarkus.grpc.clients.hello.plain-text=false # Use TLS +quarkus.grpc.clients.hello.use-quarkus-grpc-client=true # Use client using the Vert.x based transport +quarkus.grpc.clients.hello.tls.enabled=true +quarkus.grpc.clients.hello.tls.trust-certificate-p12.path=target/certs/grpc-tls-truststore.jks +quarkus.grpc.clients.hello.tls.trust-certificate-p12.password=**** +---- + +If you use JKS trust-store, use the following configuration: + +[source,properties,subs=attributes+] +---- +quarkus.grpc.clients.hello.plain-text=false # Use TLS +quarkus.grpc.clients.hello.use-quarkus-grpc-client=true # Use client using the Vert.x based transport +quarkus.grpc.clients.hello.tls.enabled=true +quarkus.grpc.clients.hello.tls.trust-certificate-jks.path=target/certs/grpc-tls-truststore.jks 
+quarkus.grpc.clients.hello.tls.trust-certificate-jks.password=**** +---- + +If you use PEM certificates as trust-store, use the following configuration: + +[source,properties,subs=attributes+] +---- +quarkus.grpc.clients.hello.plain-text=false # Use TLS +quarkus.grpc.clients.hello.use-quarkus-grpc-client=true # Use client using the Vert.x based transport +quarkus.grpc.clients.hello.tls.enabled=true +quarkus.grpc.clients.hello.tls.trust-certificate-pem.certs=target/certs/grpc-client-ca.crt +---- + +When using the gRPC Java client, you can configure TLS by setting the following properties in your `application.properties` file: + +[source,properties,subs=attributes+] +---- +quarkus.grpc.clients.hello.ssl.trust-store=target/certs/grpc-client-tls-ca.crt +---- + +gRPC Java client only support the `PEM` format for the trust-store. + +== Configuring mTLS + +When using the Vert.x based server and Vert.x-based client, you can configure mTLS by setting the following properties in your `application.properties` file: + +[source,properties,subs=attributes+] +---- +# Server side: +quarkus.grpc.server.use-separate-server=false +quarkus.grpc.server.plain-text=false # Force the client to use TLS for the tests +quarkus.http.ssl.certificate.key-store-file=target/certs/grpc-keystore.jks +quarkus.http.ssl.certificate.key-store-password=**** +quarkus.http.ssl.certificate.trust-store-file=target/certs/grpc-server-truststore.jks +quarkus.http.ssl.certificate.trust-store-password=**** +quarkus.http.ssl.client-auth=REQUIRED # Force the client to authenticate, aka mTLS +quarkus.http.insecure-requests=disabled + +# Client side: +quarkus.grpc.clients.hello.plain-text=false +quarkus.grpc.clients.hello.tls.trust-certificate-jks.path=target/certs/grpc-client-truststore.jks +quarkus.grpc.clients.hello.tls.trust-certificate-jks.password=**** +quarkus.grpc.clients.hello.tls.key-certificate-jks.path=target/certs/grpc-client-keystore.jks 
+quarkus.grpc.clients.hello.tls.key-certificate-jks.password=**** +quarkus.grpc.clients.hello.tls.enabled=true +quarkus.grpc.clients.hello.use-quarkus-grpc-client=true +---- + +If you use P12 format for the trust-store and the key-certificate, use the following configuration: + +[source,properties,subs=attributes+] +---- +# Server side +quarkus.grpc.server.use-separate-server=false +quarkus.grpc.server.plain-text=false # Force the client to use TLS for the tests +quarkus.http.ssl.certificate.key-store-file=target/certs/grpc-keystore.p12 +quarkus.http.ssl.certificate.key-store-password=**** +quarkus.http.ssl.certificate.trust-store-file=target/certs/grpc-server-truststore.p12 +quarkus.http.ssl.certificate.trust-store-password=**** +quarkus.http.ssl.client-auth=REQUIRED # Force the client to authenticate, aka mTLS +quarkus.http.insecure-requests=disabled + +# Client side +quarkus.grpc.clients.hello.plain-text=false +quarkus.grpc.clients.hello.tls.trust-certificate-p12.path=target/certs/grpc-client-truststore.p12 +quarkus.grpc.clients.hello.tls.trust-certificate-p12.password=**** +quarkus.grpc.clients.hello.tls.key-certificate-p12.path=target/certs/grpc-client-keystore.p12 +quarkus.grpc.clients.hello.tls.key-certificate-p12.password=**** +quarkus.grpc.clients.hello.tls.enabled=true +quarkus.grpc.clients.hello.use-quarkus-grpc-client=true +---- + diff --git a/docs/src/main/asciidoc/grpc.adoc b/docs/src/main/asciidoc/grpc.adoc index 8709b03a2e58d6..d0a7abe6bb1514 100644 --- a/docs/src/main/asciidoc/grpc.adoc +++ b/docs/src/main/asciidoc/grpc.adoc @@ -35,3 +35,4 @@ Quarkus gRPC is based on https://vertx.io/docs/vertx-grpc/java/[Vert.x gRPC]. 
* xref:grpc-kubernetes.adoc[Deploying your gRPC Service in Kubernetes] * xref:grpc-xds.adoc[Enabling xDS gRPC support] * xref:grpc-generation-reference.adoc[gRPC code generation reference guide] +* xref:grpc-reference.adoc[gRPC reference guide] diff --git a/docs/src/main/asciidoc/hibernate-orm-panache.adoc b/docs/src/main/asciidoc/hibernate-orm-panache.adoc index 80422dd69a1a5d..a8fcb8a47eb32f 100644 --- a/docs/src/main/asciidoc/hibernate-orm-panache.adoc +++ b/docs/src/main/asciidoc/hibernate-orm-panache.adoc @@ -746,22 +746,23 @@ The `Sort` class has plenty of methods for adding columns and specifying sort di Normally, HQL queries are of this form: `from EntityName [where ...] [order by ...]`, with optional elements at the end. -If your select query does not start with `from`, we support the following additional forms: +If your select query does not start with `from`, `select` or `with`, we support the following additional forms: - `order by ...` which will expand to `from EntityName order by ...` -- `` (and single parameter) which will expand to `from EntityName where = ?` +- `` (and single parameter) which will expand to `from EntityName where = ?` +- `where ` will expand to `from EntityName where ` - `` will expand to `from EntityName where ` If your update query does not start with `update`, we support the following additional forms: - `from EntityName ...` which will expand to `update EntityName ...` -- `set? ` (and single parameter) which will expand to `update EntityName set = ?` +- `set? ` (and single parameter) which will expand to `update EntityName set = ?` - `set? 
` will expand to `update EntityName set ` If your delete query does not start with `delete`, we support the following additional forms: - `from EntityName ...` which will expand to `delete from EntityName ...` -- `` (and single parameter) which will expand to `delete from EntityName where = ?` +- `` (and single parameter) which will expand to `delete from EntityName where = ?` - `` will expand to `delete from EntityName where ` NOTE: You can also write your queries in plain diff --git a/docs/src/main/asciidoc/hibernate-orm.adoc b/docs/src/main/asciidoc/hibernate-orm.adoc index e682cc45ceda68..3923a512e4327b 100644 --- a/docs/src/main/asciidoc/hibernate-orm.adoc +++ b/docs/src/main/asciidoc/hibernate-orm.adoc @@ -1350,11 +1350,6 @@ and annotating the implementation with the appropriate qualifiers: @JsonFormat // <1> @PersistenceUnitExtension // <2> public class MyJsonFormatMapper implements FormatMapper { // <3> - @Override - public String inspect(String sql) { - // ... - return sql; - } @Override public T fromString(CharSequence charSequence, JavaType javaType, WrapperOptions wrapperOptions) { // ... @@ -1382,11 +1377,6 @@ In case of a custom XML format mapper, a different CDI qualifier must be applied @XmlFormat // <1> @PersistenceUnitExtension // <2> public class MyJsonFormatMapper implements FormatMapper { // <3> - @Override - public String inspect(String sql) { - // ... - return sql; - } @Override public T fromString(CharSequence charSequence, JavaType javaType, WrapperOptions wrapperOptions) { // ... diff --git a/docs/src/main/asciidoc/hibernate-reactive-panache.adoc b/docs/src/main/asciidoc/hibernate-reactive-panache.adoc index f2decd5f51ec93..5a649ea3ba2f64 100644 --- a/docs/src/main/asciidoc/hibernate-reactive-panache.adoc +++ b/docs/src/main/asciidoc/hibernate-reactive-panache.adoc @@ -518,22 +518,23 @@ The `Sort` class has plenty of methods for adding columns and specifying sort di Normally, HQL queries are of this form: `from EntityName [where ...] 
[order by ...]`, with optional elements at the end. -If your select query does not start with `from`, we support the following additional forms: +If your select query does not start with `from`, `select` or `with`, we support the following additional forms: - `order by ...` which will expand to `from EntityName order by ...` -- `` (and single parameter) which will expand to `from EntityName where = ?` +- `` (and single parameter) which will expand to `from EntityName where = ?` +- `where ` will expand to `from EntityName where ` - `` will expand to `from EntityName where ` If your update query does not start with `update`, we support the following additional forms: -- `from EntityName ...` which will expand to `update from EntityName ...` -- `set? ` (and single parameter) which will expand to `update from EntityName set = ?` -- `set? ` will expand to `update from EntityName set ` +- `from EntityName ...` which will expand to `update EntityName ...` +- `set? ` (and single parameter) which will expand to `update EntityName set = ?` +- `set? ` will expand to `update EntityName set ` If your delete query does not start with `delete`, we support the following additional forms: - `from EntityName ...` which will expand to `delete from EntityName ...` -- `` (and single parameter) which will expand to `delete from EntityName where = ?` +- `` (and single parameter) which will expand to `delete from EntityName where = ?` - `` will expand to `delete from EntityName where ` NOTE: You can also write your queries in plain diff --git a/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc b/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc index 9e5d14a1593560..7b36abc3243376 100644 --- a/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc +++ b/docs/src/main/asciidoc/hibernate-search-orm-elasticsearch.adoc @@ -759,7 +759,7 @@ as shown below. 
[source,properties] ---- -quarkus.hibernate-search-orm.elasticsearch.version=opensearch:1.2 +quarkus.hibernate-search-orm.elasticsearch.version=opensearch:2.11 ---- All other configuration options and APIs are exactly the same as with Elasticsearch. diff --git a/docs/src/main/asciidoc/http-reference.adoc b/docs/src/main/asciidoc/http-reference.adoc index 22c9db0150bcc0..dbab826dc68cba 100644 --- a/docs/src/main/asciidoc/http-reference.adoc +++ b/docs/src/main/asciidoc/http-reference.adoc @@ -239,8 +239,8 @@ Configure the `quarkus.http.ssl.certificate.reload-period` property to specify t [source, properties] ---- -quarkus.http.ssl.certificate.files=/mount/certs/cert.pem -quarkus.http.ssl.certificate.key-files=/mount/certs/key.pem +quarkus.http.ssl.certificate.files=/mount/certs/tls.crt +quarkus.http.ssl.certificate.key-files=/mount/certs/tls.key quarkus.http.ssl.certificate.reload-period=1h ---- diff --git a/docs/src/main/asciidoc/images/auth0-devui-jwt-accesstoken.png b/docs/src/main/asciidoc/images/auth0-devui-jwt-accesstoken.png new file mode 100644 index 00000000000000..da90d8d5d680e4 Binary files /dev/null and b/docs/src/main/asciidoc/images/auth0-devui-jwt-accesstoken.png differ diff --git a/docs/src/main/asciidoc/kafka.adoc b/docs/src/main/asciidoc/kafka.adoc index 120a2977b7e52c..35255f98a82a74 100644 --- a/docs/src/main/asciidoc/kafka.adoc +++ b/docs/src/main/asciidoc/kafka.adoc @@ -2048,14 +2048,31 @@ mp.messaging.incoming.data.tracing-enabled=false If the xref:telemetry-micrometer.adoc[Micrometer extension] is present, then Kafka producer and consumer clients metrics are exposed as Micrometer meters. -Per channel metrics are also exposed as Micrometer meters. -The number of messages produced or received per channel, acknowledgments and duration of processing are exposed. +=== Channel metrics -The messaging meters can be disabled: +Per channel metrics can also be gathered and exposed as Micrometer meters. 
+Following metrics can be gathered per channel, identified with the _channel_ tag: + +* `quarkus.messaging.message.count` : The number of messages produced or received +* `quarkus.messaging.message.acks` : The number of messages processed successfully +* `quarkus.messaging.message.failures` : The number of messages processed with failures +* `quarkus.messaging.message.duration` : The duration of the message processing. + +For backwards compatibility reasons channel metrics are not enabled by default and can be enabled with: + +[IMPORTANT] +==== +The https://smallrye.io/smallrye-reactive-messaging/latest/concepts/observability/[message observation] +depends on intercepting messages and therefore doesn't support channels consuming messages with +a custom message type such as `IncomingKafkaRecord`, `KafkaRecord`, `IncomingKafkaRecordBatch` or `KafkaRecordBatch`. + +The message interception, and observation, still work with channels consuming the generic `Message` type, +or custom payloads enabled by https://smallrye.io/smallrye-reactive-messaging/latest/concepts/converters/[converters]. +==== [source, properties] ---- -quarkus.micrometer.binder.messaging.enabled=false +smallrye.messaging.observation.enabled=true ---- diff --git a/docs/src/main/asciidoc/management-interface-reference.adoc b/docs/src/main/asciidoc/management-interface-reference.adoc index 345592ca61f9dd..0f245b3875795e 100644 --- a/docs/src/main/asciidoc/management-interface-reference.adoc +++ b/docs/src/main/asciidoc/management-interface-reference.adoc @@ -58,6 +58,20 @@ quarkus.management.ssl.certificate.key-store-file=server-keystore.jks quarkus.management.ssl.certificate.key-store-password=secret ---- +Key store, trust store and certificate files can be reloaded periodically. 
+Configure the `quarkus.management.ssl.certificate.reload-period` property to specify the interval at which the certificates should be reloaded: + +[source, properties] +---- +quarkus.management.ssl.certificate.files=/mount/certs/tls.crt +quarkus.management.ssl.certificate.key-files=/mount/certs/tls.key +quarkus.management.ssl.certificate.reload-period=1h +---- + +The files are reloaded from the same location as they were initially loaded from. +If there is no content change, the reloading is a no-op. +If the reloading fails, the server will continue to use the previous certificates. + IMPORTANT: Unlike the main HTTP server, the management interface does not handle _http_ and _https_ at the same time. If _https_ is configured, plain HTTP requests will be rejected. diff --git a/docs/src/main/asciidoc/picocli.adoc b/docs/src/main/asciidoc/picocli.adoc index 1d0edcf3ec2413..4ec8182a34c419 100644 --- a/docs/src/main/asciidoc/picocli.adoc +++ b/docs/src/main/asciidoc/picocli.adoc @@ -337,10 +337,6 @@ metadata: name: app spec: completionMode: NonIndexed - selector: - matchLabels: - app.kubernetes.io/name: app - app.kubernetes.io/version: 0.1-SNAPSHOT suspend: false template: metadata: diff --git a/docs/src/main/asciidoc/qute-reference.adoc b/docs/src/main/asciidoc/qute-reference.adoc index ba56d9283048c2..799ac39c1b0bb2 100644 --- a/docs/src/main/asciidoc/qute-reference.adoc +++ b/docs/src/main/asciidoc/qute-reference.adoc @@ -2654,6 +2654,12 @@ The configration value is a regular expression that matches the template path re For example, `quarkus.qute.dev-mode.no-restart-templates=templates/foo.html` matches the template `src/main/resources/templates/foo.html`. The matching templates are reloaded and only runtime validations are performed. +=== Testing + +In the test mode, the rendering results of injected and type-safe templates are recorded in the managed `io.quarkus.qute.RenderedResults` which is registered as a CDI bean. 
+You can inject `RenderedResults` in a test or any other CDI bean and assert the results. +However, it's possible to set the `quarkus.qute.test-mode.record-rendered-results` configuration property to `false` to disable this feature. + [[type-safe-message-bundles]] === Type-safe Message Bundles diff --git a/docs/src/main/asciidoc/redis-reference.adoc b/docs/src/main/asciidoc/redis-reference.adoc index 12ec1bb96f9102..24709b8fe4c517 100644 --- a/docs/src/main/asciidoc/redis-reference.adoc +++ b/docs/src/main/asciidoc/redis-reference.adoc @@ -173,10 +173,16 @@ quarkus.redis.hosts=redis://localhost:5000,redis://localhost:5001,redis://localh quarkus.redis.client-type=sentinel # Optional -quarkus.redis.master-name=my-sentinel # Default is my-master +quarkus.redis.master-name=my-sentinel # Default is mymaster quarkus.redis.role=master # master is the default ---- +The host URLs here must be the sentinel servers. +The client will obtain the URLs of actual Redis servers (master or replicas, depending on `role`) from one of the sentinels, using the `master-name` as an identifier of the "master set". + +Note that you practically never want to configure `quarkus.redis.role=sentinel`. +This setting means that the Redis client will execute commands directly on one of the sentinel servers, instead of an actual Redis server guarded by the sentinels. + === Use the Cluster Mode When using Redis in cluster mode, you need to pass multiple _host urls_, configure the client type to `cluster` and configure the `replicas` mode: @@ -188,6 +194,10 @@ quarkus.redis.client-type=cluster quarkus.redis.replicas=share ---- +The host URLs here must be some of the cluster members. +Not all cluster members need to be configured, as the client will obtain a full cluster topology from one of the known servers. +However, it is advisable to configure at least 2 or 3 nodes, not just 1. 
+ === Use the replication Mode When using the replication mode, you need to pass a single host url and configure the type to be `replication`: @@ -215,6 +225,8 @@ To use TLS, you need to: 1. Set the `quarkus.redis.tls.enabled=true` property 2. Make sure that your URL starts with `rediss://` (with two `s`) +IMPORTANT: The default hostname verifier is set to `NONE`, meaning it does not verify the host name. You can change this behavior by setting the `quarkus.redis.tls.hostname-verification-algorithm` property, to `HTTPS` for example. + === Configure the authentication The Redis password can be set in the `redis://` URL or with the `quarkus.redis.password` property. diff --git a/docs/src/main/asciidoc/rest-client-reactive.adoc b/docs/src/main/asciidoc/rest-client-reactive.adoc index 21e3b34086a451..7203210e58b95b 100644 --- a/docs/src/main/asciidoc/rest-client-reactive.adoc +++ b/docs/src/main/asciidoc/rest-client-reactive.adoc @@ -1681,7 +1681,14 @@ quarkus.rest-client.logging.body-limit=50 quarkus.log.category."org.jboss.resteasy.reactive.client.logging".level=DEBUG ---- -TIP: REST Client Reactive uses a default `ClientLogger` implementation. You can change it by providing a custom `ClientLogger` instance through CDI or when programmatically creating your client. +[TIP] +==== +REST Client Reactive uses a default `ClientLogger` implementation, which can be swapped out for a custom implementation. + +When setting up the client programmatically using the `QuarkusRestClientBuilder`, the `ClientLogger` is set via the `clientLogger` method. + +For declarative clients using `@RegisterRestClient`, simply providing a CDI bean that implements `ClientLogger` is enough for that logger to be used by said clients. +==== == Mocking the client for tests If you use a client injected with the `@RestClient` annotation, you can easily mock it for tests. 
diff --git a/docs/src/main/asciidoc/resteasy-reactive-migration.adoc b/docs/src/main/asciidoc/resteasy-reactive-migration.adoc index e46aad7d9fd3a6..09eb2e0a35aa66 100644 --- a/docs/src/main/asciidoc/resteasy-reactive-migration.adoc +++ b/docs/src/main/asciidoc/resteasy-reactive-migration.adoc @@ -123,6 +123,10 @@ Quarkus uses smart defaults when determining the media type of Jakarta REST meth The difference between `quarkus-resteasy-reactive` and `quarkus-resteasy` is the use of `text/plain` as the default media type instead of `text/html` when the method returns a `String`. +=== Injection of `@SessionScoped` beans + +`@SessionScoped` beans are currently not supported. Should you really need this functionality, you'll need to use RESTEasy Classic instead of RESTEasy Reactive. + === Servlets RESTEasy Reactive does **not** support servlets. diff --git a/docs/src/main/asciidoc/resteasy-reactive.adoc b/docs/src/main/asciidoc/resteasy-reactive.adoc index 52278efc7003cf..0670c4e9102e60 100644 --- a/docs/src/main/asciidoc/resteasy-reactive.adoc +++ b/docs/src/main/asciidoc/resteasy-reactive.adoc @@ -1005,6 +1005,124 @@ public class Endpoint { } ---- +=== Concurrent stream element processing + +By default, `RestMulti` ensures serial/sequential order of the items/elements produced by the wrapped +`Multi` by using a value of 1 for the demand signaled to the publishers. To enable concurrent +processing/generation of multiple items, use `withDemand(long demand)`. + +Using a demand higher than 1 is useful when multiple items shall be returned and the production of each +item takes some time, i.e. when parallel/concurrent production improves the service response time. Be +aware the concurrent processing also requires more resources and puts a higher load on services or +resources that are needed to produce the items. Also consider using `Multi.capDemandsTo(long)` and +`Multi.capDemandsUsing(LongFunction)`. 
+ +The example below produces 5 (JSON) strings, but the _order_ of the strings in the returned JSON array +is not guaranteed. The below example also works for JSON objects and not just simple types. + +[source,java] +---- +package org.acme.rest; + +import jakarta.inject.Inject; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; + +import io.smallrye.mutiny.Multi; +import org.jboss.resteasy.reactive.RestMulti; + +@Path("message-stream") +public class Endpoint { + @GET + public Multi streamMessages() { + Multi sourceMulti = Multi + .createBy() + .merging() + .streams( + Multi.createFrom().items( + "message-1", + "message-2", + "message-3", + "message-4", + "message-5" + ) + ); + + return RestMulti + .fromMultiData(sourceMulti) + .withDemand(5) + .build(); + } +} +---- + +Example response, the order is non-deterministic. + +[source,text] +---- +"message-3" +"message-5" +"message-4" +"message-1" +"message-2" +---- + +=== Returning multiple JSON objects + +By default, `RestMulti` returns items/elements produced by the wrapped `Multi` as a JSON array, if the +media-type is `application/json`. To return separate JSON objects that are not wrapped in a JSON array, +use `encodeAsArray(false)` (`encodeAsArray(true)` is the default). Note that streaming multiple +objects this way requires a slightly different parsing on the client side, but objects can be parsed and +consumed as they appear without having to deserialize a possibly huge result at once. 
+ +The example below produces 5 (JSON) strings, that are not wrapped in an array, like this: + +[source,text] +---- +"message-1" +"message-2" +"message-3" +"message-4" +"message-5" +---- + +[source,java] +---- +package org.acme.rest; + +import jakarta.inject.Inject; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; + +import io.smallrye.mutiny.Multi; +import org.jboss.resteasy.reactive.RestMulti; + +@Path("message-stream") +public class Endpoint { + @GET + public Multi streamMessages() { + Multi sourceMulti = Multi + .createBy() + .merging() + .streams( + Multi.createFrom().items( + "message-1", + "message-2", + "message-3", + "message-4", + "message-5" + ) + ); + + return RestMulti + .fromMultiData(sourceMulti) + .encodeAsJsonArray(false) + .build(); + } +} +---- + + === Server-Sent Event (SSE) support If you want to stream JSON objects in your response, you can use diff --git a/docs/src/main/asciidoc/scheduler-reference.adoc b/docs/src/main/asciidoc/scheduler-reference.adoc index 25fa988a583150..b2374725ae3a19 100644 --- a/docs/src/main/asciidoc/scheduler-reference.adoc +++ b/docs/src/main/asciidoc/scheduler-reference.adoc @@ -34,12 +34,16 @@ Furthermore, the annotated method must return `void` and either declare no param TIP: The annotation is repeatable so a single method could be scheduled multiple times. -[WARNING] -==== -Subclasses never inherit the metadata of a `@Scheduled` method declared on a superclass. In the following example, the `everySecond()` method is only invoked upon the instance of `Jobs`. +=== Inheritance of metadata + +A subclass never inherits the metadata of a `@Scheduled` method declared on a superclass. +For example, suppose the class `org.amce.Foo` is extended by the class `org.amce.Bar`. +If `Foo` declares a non-static method annotated with `@Scheduled` then `Bar` does not inherit the metadata of the scheduled method. +In the following example, the `everySecond()` method is only invoked upon the instance of `Foo`. 
+ [source,java] ---- -class Jobs { +class Foo { @Scheduled(every = "1s") void everySecond() { @@ -48,12 +52,38 @@ class Jobs { } @Singleton -class MyJobs extends Jobs { +class Bar extends Foo { } ---- -==== -A CDI event of type `io.quarkus.scheduler.SuccessfulExecution` is fired synchronously and asynchronously when an execution of a scheduled method is successful. A CDI event of type `io.quarkus.scheduler.FailedExecution` is fired synchronously and asynchronously when an execution of a scheduled method throws an exception. +=== CDI events + +Some CDI events are fired synchronously and asynchronously when specific events occur. + +|=== +|Type |Event description + +|`io.quarkus.scheduler.SuccessfulExecution` +|An execution of a scheduled job completed successfully. + +|`io.quarkus.scheduler.FailedExecution` +|An execution of a scheduled job completed with an exception. + +|`io.quarkus.scheduler.SkippedExecution` +|An execution of a scheduled job was skipped. + +|`io.quarkus.scheduler.SchedulerPaused` +|The scheduler was paused. + +|`io.quarkus.scheduler.SchedulerResumed` +|The scheduler was resumed. + +|`io.quarkus.scheduler.ScheduledJobPaused` +|A scheduled job was paused. + +|`io.quarkus.scheduler.ScheduledJobResumed` +|A scheduled job was resumed. +|=== === Triggers @@ -134,7 +164,7 @@ void myMethod() { } An interval trigger defines a period between invocations. The period expression is based on the ISO-8601 duration format `PnDTnHnMn.nS` and the value of `@Scheduled#every()` is parsed with `java.time.Duration#parse(CharSequence)`. -However, if an expression starts with a digit then the `PT` prefix is added automatically. +However, if an expression starts with a digit and ends with `d`, `P` prefix will be added automatically. If the expression only starts with a digit, `PT` prefix is added automatically. So for example, `15m` can be used instead of `PT15M` and is parsed as "15 minutes". 
.Interval Trigger Example @@ -207,7 +237,7 @@ NOTE: The final value is always rounded to full second. `@Scheduled#delayed()` is a text alternative to the properties above. The period expression is based on the ISO-8601 duration format `PnDTnHnMn.nS` and the value is parsed with `java.time.Duration#parse(CharSequence)`. -However, if an expression starts with a digit, the `PT` prefix is added automatically. +However, if an expression starts with a digit and ends with `d`, `P` prefix will be added automatically. If the expression only starts with a digit, `PT` prefix is added automatically. So for example, `15s` can be used instead of `PT15S` and is parsed as "15 seconds". [source,java] diff --git a/docs/src/main/asciidoc/security-authentication-mechanisms.adoc b/docs/src/main/asciidoc/security-authentication-mechanisms.adoc index 76bfeb30693fcd..af7e03bcbdda88 100644 --- a/docs/src/main/asciidoc/security-authentication-mechanisms.adoc +++ b/docs/src/main/asciidoc/security-authentication-mechanisms.adoc @@ -81,6 +81,42 @@ The resulting digest is used as a key for AES-256 encryption of the cookie value The cookie contains an expiry time as part of the encrypted value, so all nodes in the cluster must have their clocks synchronized. At one-minute intervals, a new cookie gets generated with an updated expiry time if the session is in use. +To get started with form authentication, you should have similar settings as described in xref:security-basic-authentication-howto.adoc[Enable Basic authentication] and property `quarkus.http.auth.form.enabled` must be set to `true`. 
+ +Simple `application.properties` with form-base authentication can look similar to this: +[source,properties] +---- +quarkus.http.auth.form.enabled=true + +quarkus.http.auth.form.login-page=login.html +quarkus.http.auth.form.landing-page=hello +quarkus.http.auth.form.error-page= + +# Define testing user +quarkus.security.users.embedded.enabled=true +quarkus.security.users.embedded.plain-text=true +quarkus.security.users.embedded.users.alice=alice +quarkus.security.users.embedded.roles.alice=user +---- + +[IMPORTANT] +==== +Configuring user names, secrets, and roles in the application.properties file is appropriate only for testing scenarios. For securing a production application, it is crucial to use a database or LDAP to store this information. For more information you can take a look at xref:security-jpa.adoc[Quarkus Security with Jakarta Persistence] or other mentioned in xref:security-basic-authentication-howto.adoc[Enable Basic authentication]. +==== + +and application login page will contain HTML form similar to this: + +[source,html] +---- +
+ + + + + +
+---- + With single-page applications (SPA), you typically want to avoid redirects by removing default page paths, as shown in the following example: [source,properties] diff --git a/docs/src/main/asciidoc/security-authorize-web-endpoints-reference.adoc b/docs/src/main/asciidoc/security-authorize-web-endpoints-reference.adoc index d0e4bb48dc2c5e..723d3aaf279072 100644 --- a/docs/src/main/asciidoc/security-authorize-web-endpoints-reference.adoc +++ b/docs/src/main/asciidoc/security-authorize-web-endpoints-reference.adoc @@ -65,15 +65,41 @@ quarkus.http.auth.permission.roles1.policy=role-policy1 ---- <1> This permission references the default built-in `permit` policy to allow `GET` methods to `/public`. In this case, the demonstrated setting would not affect this example because this request is allowed anyway. -<2> This permission references the built-in `deny` policy for `/forbidden`. +<2> This permission references the built-in `deny` policy for both `/forbidden` and `/forbidden/` paths. It is an exact path match because it does not end with `*`. <3> This permission set references the previously defined policy. `roles1` is an example name; you can call the permission sets whatever you want. -[WARNING] +[IMPORTANT] ==== -The exact path `/forbidden` in the example will not secure the `/forbidden/` path. -It is necessary to add a new exact path for the `/forbidden/` path to ensure proper security coverage. +The exact path pattern `/forbidden` in the example above also secures the `/forbidden/` path. +This way, the `forbidden` endpoint in the example below is secured by the `deny1` permission. + +[source,java] +---- +package org.acme.crud; + +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; + +@Path("/forbidden") +public class ForbiddenResource { + @GET + public String forbidden() { <1> + return "No!"; + } +} +---- +<1> Both `/forbidden` and `/forbidden/` paths need to be secured in order to secure the `forbidden` endpoint. 
+ +If you need to permit access to the `/forbidden/` path, please add new permission with more specific exact path like in the example below: + +[source,properties] +---- +quarkus.http.auth.permission.permit1.paths=/forbidden/ <1> +quarkus.http.auth.permission.permit1.policy=permit +---- +<1> The `/forbidden/` path is not secured. ==== [[custom-http-security-policy]] diff --git a/docs/src/main/asciidoc/security-customization.adoc b/docs/src/main/asciidoc/security-customization.adoc index a60c30a0ca2e08..f8d3de551d00dc 100644 --- a/docs/src/main/asciidoc/security-customization.adoc +++ b/docs/src/main/asciidoc/security-customization.adoc @@ -72,6 +72,12 @@ public class CustomAwareJWTAuthMechanism implements HttpAuthenticationMechanism } ---- +TIP: The `HttpAuthenticationMechanism` should transform incoming HTTP request with suitable authentication credentials +into an `io.quarkus.security.identity.request.AuthenticationRequest` instance and delegate the authentication to the `io.quarkus.security.identity.IdentityProviderManager`. +Leaving authentication to the `io.quarkus.security.identity.IdentityProvider`s gives you more options for credentials verifications, +as well as convenient way to perform blocking tasks. +Nevertheless, the `io.quarkus.security.identity.IdentityProvider` can be omitted and the `HttpAuthenticationMechanism` is free to authenticate request on its own in trivial use cases. + [[dealing-with-more-than-one-http-auth-mechanisms]] == Dealing with more than one HttpAuthenticationMechanism diff --git a/docs/src/main/asciidoc/security-jdbc.adoc b/docs/src/main/asciidoc/security-jdbc.adoc index e9447819c373a9..a8718e70ebd108 100644 --- a/docs/src/main/asciidoc/security-jdbc.adoc +++ b/docs/src/main/asciidoc/security-jdbc.adoc @@ -167,6 +167,8 @@ quarkus.datasource.jdbc.url=jdbc:postgresql:elytron-security-jdbc ---- In our context, we are using PostgreSQL as identity store, and we initialize the database with users and roles. 
+We will use the salted and hashed version of `password` as a password in this example. +We can use the `BcryptUtil` class to generate passwords in the Modular Crypt Format (MCF). [source,sql] ---- @@ -177,16 +179,27 @@ CREATE TABLE test_user ( role VARCHAR(255) ); -INSERT INTO test_user (id, username, password, role) VALUES (1, 'admin', 'admin', 'admin'); -INSERT INTO test_user (id, username, password, role) VALUES (2, 'user','user', 'user'); +INSERT INTO test_user (id, username, password, role) VALUES (1, 'admin', '$2a$10$Uc.SZ0hvGJQlYdsAp7be1.lFjmOnc7aAr4L0YY3/VN3oK.F8zJHRG', 'admin'); +INSERT INTO test_user (id, username, password, role) VALUES (2, 'user','$2a$10$Uc.SZ0hvGJQlYdsAp7be1.lFjmOnc7aAr4L0YY3/VN3oK.F8zJHRG', 'user'); ---- -[NOTE] -==== -It is probably useless, but we kindly remind you that you must not store clear-text passwords in production environment ;-). -The `elytron-security-jdbc` extension offers a built-in bcrypt password mapper. -Please refer to the xref:security-getting-started-tutorial.adoc#define-the-user-entity[Define the user entity] section of the Getting started with Security by using Basic authentication and Jakarta Persistence tutorial for practical example. -==== +When signing up new users, we can encrypt their password as follows: + +[source,java] +---- +package org.acme.security.jdbc; + +import io.quarkus.elytron.security.common.BcryptUtil; + +public class AccountService { + + public void signupUser(String username, String password) { + String encryptedPassword = BcryptUtil.bcryptHash(password); + + // store user with the encrypted password in the database + } +} +---- We can now configure the Elytron JDBC Realm. @@ -194,8 +207,10 @@ We can now configure the Elytron JDBC Realm. ---- quarkus.security.jdbc.enabled=true quarkus.security.jdbc.principal-query.sql=SELECT u.password, u.role FROM test_user u WHERE u.username=? 
<1> -quarkus.security.jdbc.principal-query.clear-password-mapper.enabled=true <2> -quarkus.security.jdbc.principal-query.clear-password-mapper.password-index=1 +quarkus.security.jdbc.principal-query.bcrypt-password-mapper.enabled=true <2> +quarkus.security.jdbc.principal-query.bcrypt-password-mapper.password-index=1 +quarkus.security.jdbc.principal-query.bcrypt-password-mapper.salt-index=-1 +quarkus.security.jdbc.principal-query.bcrypt-password-mapper.iteration-count-index=-1 quarkus.security.jdbc.principal-query.attribute-mappings.0.index=2 <3> quarkus.security.jdbc.principal-query.attribute-mappings.0.to=groups ---- @@ -203,7 +218,7 @@ quarkus.security.jdbc.principal-query.attribute-mappings.0.to=groups The `elytron-security-jdbc` extension requires at least one principal query to authenticate the user and its identity. <1> We define a parameterized SQL statement (with exactly 1 parameter) which should return the user's password plus any additional information you want to load. -<2> We configure the password mapper with the position of the password field in the `SELECT` fields and other information like salt, hash encoding, etc. +<2> We configure the password mapper with the position of the password field in the `SELECT` fields and other information like salt, hash encoding, etc. Setting the salt and iteration count indexes to `-1` is required for MCF. <3> We use `attribute-mappings` to bind the `SELECT` projection fields (i.e. `u.role` here) to the target Principal representation attributes. [NOTE] @@ -242,21 +257,21 @@ So far so good, now let's try with an allowed user. [source,shell] ---- -$ curl -i -X GET -u admin:admin http://localhost:8080/api/admin +$ curl -i -X GET -u admin:password http://localhost:8080/api/admin HTTP/1.1 200 OK Content-Length: 5 Content-Type: text/plain;charset=UTF-8 admin% ---- -By providing the `admin:admin` credentials, the extension authenticated the user and loaded their roles. 
+By providing the `admin:password` credentials, the extension authenticated the user and loaded their roles. The `admin` user is authorized to access to the protected resources. The user `admin` should be forbidden to access a resource protected with `@RolesAllowed("user")` because it doesn't have this role. [source,shell] ---- -$ curl -i -X GET -u admin:admin http://localhost:8080/api/users/me +$ curl -i -X GET -u admin:password http://localhost:8080/api/users/me HTTP/1.1 403 Forbidden Content-Length: 34 Content-Type: text/html;charset=UTF-8 @@ -268,7 +283,7 @@ Finally, using the user `user` works and the security context contains the princ [source,shell] ---- -$ curl -i -X GET -u user:user http://localhost:8080/api/users/me +$ curl -i -X GET -u user:password http://localhost:8080/api/users/me HTTP/1.1 200 OK Content-Length: 4 Content-Type: text/plain;charset=UTF-8 @@ -294,8 +309,10 @@ quarkus.datasource.permissions.jdbc.url=jdbc:postgresql:multiple-data-sources-pe quarkus.security.jdbc.enabled=true quarkus.security.jdbc.principal-query.sql=SELECT u.password FROM test_user u WHERE u.username=? -quarkus.security.jdbc.principal-query.clear-password-mapper.enabled=true -quarkus.security.jdbc.principal-query.clear-password-mapper.password-index=1 +quarkus.security.jdbc.principal-query.bcrypt-password-mapper.enabled=true +quarkus.security.jdbc.principal-query.bcrypt-password-mapper.password-index=1 +quarkus.security.jdbc.principal-query.bcrypt-password-mapper.salt-index=-1 +quarkus.security.jdbc.principal-query.bcrypt-password-mapper.iteration-count-index=-1 quarkus.security.jdbc.principal-query.roles.sql=SELECT r.role_name FROM test_role r, test_user_role ur WHERE ur.username=? 
AND ur.role_id = r.id quarkus.security.jdbc.principal-query.roles.datasource=permissions diff --git a/docs/src/main/asciidoc/security-oidc-auth0-tutorial.adoc b/docs/src/main/asciidoc/security-oidc-auth0-tutorial.adoc index 0983304d2d8396..7c52d1b14c2813 100644 --- a/docs/src/main/asciidoc/security-oidc-auth0-tutorial.adoc +++ b/docs/src/main/asciidoc/security-oidc-auth0-tutorial.adoc @@ -492,9 +492,9 @@ For more information on how to configure Auth0 and Quarkus to have authorization So far we have only tested the Quarkus endpoint using OIDC authorization code flow. In this flow you use the browser to access the Quarkus endpoint, Quarkus itself manages the authorization code flow, a user is redirected to Auth0, logs in, is redirected back to Quarkus, Quarkus completes the flow by exchanging the code for the ID, access, and refresh tokens, and works with the ID token representing the successful user authentication. The access token is not relevant at the moment. As mentioned earlier, in the authorization code flow, Quarkus will only use the access token to access downstream services on behalf of the currently authenticated user. -Lets imagine though that the Quarkus endpoint we have developed has to accept `Bearer` access tokens too: it may be that the other Quarkus endpoint which is propagating it to this endpoint or it can be SPA which uses the access token to access the Quarkus endpoint. And Quarkus OIDC DevUI SPA which we already used to analyze the ID token fits perfectly for using the access token available to SPA to test the Quarkus endpoint. +Let's imagine though that the Quarkus endpoint we have developed has to accept `Bearer` access tokens too: it may be that the other Quarkus endpoint which is propagating it to this endpoint or it can be SPA which uses the access token to access the Quarkus endpoint. 
And Quarkus OIDC DevUI SPA which we already used to analyze the ID token fits perfectly for using the access token available to SPA to test the Quarkus endpoint. -Lets go again to http://localhost:8080/q/dev, select the `OpenId Connect` card, login to Auth0, and check the Access token content: +Let's go again to http://localhost:8080/q/dev-ui, select the `OpenId Connect` card, login to Auth0, and check the Access token content: image::auth0-devui-accesstoken.png[Auth0 DevUI Access Token] @@ -632,7 +632,7 @@ For more information about token propagation, see xref:security-openid-connect-c We have already looked in detail at how Quarkus OIDC can handle <>, but we don't want to propagate Auth0 opaque tokens to micro services which do something useful on behalf on the currently authenticated user, beyond checking its UserInfo. -A microservice which the front-end Quarkus application will access by propagating authorization code flow access tokens to it is represented in the Auth0 dashboard as an `API`. Lets add it in the `Applications/APIs`: +A microservice which the front-end Quarkus application will access by propagating authorization code flow access tokens to it is represented in the Auth0 dashboard as an `API`. Let's add it in the `Applications/APIs`: image::auth0-api.png[Auth0 API] @@ -804,6 +804,12 @@ public class GreetingResource { Open a browser, access http://localhost:8080/hello and get your name displayed in the browser. +Let's go to http://localhost:8080/q/dev-ui, select the `OpenId Connect` card, login to Auth0, and check the Access token content: + +image::auth0-devui-jwt-accesstoken.png[Auth0 DevUI JWT Access Token] + +As you can see, the access token is no longer encrypted as shown in the <> section and indeed it is in the JWT format now. 
+ [[permission-based-access-control]] === Permission Based Access Control @@ -939,7 +945,7 @@ quarkus.oidc.client-id=sKQu1dXjHB6r0sra0Y1YCqBZKWXqCkly quarkus.oidc.credentials.secret=${client-secret} ---- -In production, you will distinguish between prod and test level configuration with `%prod.` and `%test.` qualifiers. Lets assume that the above configuration will indeed be prefixed with `%test.` in your real application, with this configuration also including the `%prod.` qualified Auth0 production tenant configuration. +In production, you will distinguish between prod and test level configuration with `%prod.` and `%test.` qualifiers. Let's assume that the above configuration will indeed be prefixed with `%test.` in your real application, with this configuration also including the `%prod.` qualified Auth0 production tenant configuration. Using `OidcTestClient` to test such configuration requires acquiring a token from the Auth0 dev tenant, using either OAuth2 `password` or `client_credentials` grant, we will try a `password` grant. Make sure the application registered in the Auth0 dashboard allows the `password` grant: diff --git a/docs/src/main/asciidoc/security-oidc-code-flow-authentication.adoc b/docs/src/main/asciidoc/security-oidc-code-flow-authentication.adoc index a4a346d4e7fdc8..9f0e7a32431c5c 100644 --- a/docs/src/main/asciidoc/security-oidc-code-flow-authentication.adoc +++ b/docs/src/main/asciidoc/security-oidc-code-flow-authentication.adoc @@ -298,7 +298,7 @@ quarkus.oidc.introspection-credentials.secret=introspection-user-secret [[oidc-request-filters]] ==== OIDC request filters -You can filter OIDC requests made by Quarkus to the OIDC provider by registering one or more `OidcRequestFiler` implementations, which can update or add new request headers and can also log requests. 
+You can filter OIDC requests made by Quarkus to the OIDC provider by registering one or more `OidcRequestFilter` implementations, which can update or add new request headers and can also log requests. For example: diff --git a/docs/src/main/asciidoc/security-openid-connect-client-reference.adoc b/docs/src/main/asciidoc/security-openid-connect-client-reference.adoc index 3938d8a629d4e7..155aa0516065f8 100644 --- a/docs/src/main/asciidoc/security-openid-connect-client-reference.adoc +++ b/docs/src/main/asciidoc/security-openid-connect-client-reference.adoc @@ -702,6 +702,71 @@ quarkus.oidc-client.credentials.jwt.subject=custom-subject quarkus.oidc-client.credentials.jwt.issuer=custom-issuer ---- +==== JWT Bearer + +link:https://www.rfc-editor.org/rfc/rfc7523[RFC7523] explains how JWT Bearer tokens can be used to authenticate clients, see the link:https://www.rfc-editor.org/rfc/rfc7523#section-2.2[Using JWTs for Client Authentication] section for more information. + +It can be enabled as follows: + +[source,properties] +---- +quarkus.oidc-client.auth-server-url=${auth-server-url} +quarkus.oidc-client.client-id=quarkus-app +quarkus.oidc-client.credentials.jwt.source=bearer +---- + +Next, the JWT bearer token must be provided as a `client_assertion` parameter to the OIDC client. + +You can use `OidcClient` methods for acquiring or refreshing tokens which accept additional grant parameters, for example, `oidcClient.getTokens(Map.of("client_assertion", "ey..."))`. + +If you work with the OIDC client filters then you must register a custom filter which will provide this assertion.
+ +Here is an example of the RESTEasy Reactive custom filter: + +[source,java] +---- +package io.quarkus.it.keycloak; + +import java.util.Map; + +import io.quarkus.oidc.client.reactive.filter.runtime.AbstractOidcClientRequestReactiveFilter; +import io.quarkus.oidc.common.runtime.OidcConstants; +import jakarta.annotation.Priority; +import jakarta.ws.rs.Priorities; + +@Priority(Priorities.AUTHENTICATION) +public class OidcClientRequestCustomFilter extends AbstractOidcClientRequestReactiveFilter { + + @Override + protected Map<String, String> additionalParameters() { + return Map.of(OidcConstants.CLIENT_ASSERTION, "ey..."); + } +} +---- + +Here is an example of the RESTEasy Classic custom filter: + +[source,java] +---- +package io.quarkus.it.keycloak; + +import java.util.Map; + +import io.quarkus.oidc.client.filter.runtime.AbstractOidcClientRequestFilter; +import io.quarkus.oidc.common.runtime.OidcConstants; +import jakarta.annotation.Priority; +import jakarta.ws.rs.Priorities; + +@Priority(Priorities.AUTHENTICATION) +public class OidcClientRequestCustomFilter extends AbstractOidcClientRequestFilter { + + @Override + protected Map<String, String> additionalParameters() { + return Map.of(OidcConstants.CLIENT_ASSERTION, "ey..."); + } +} +---- + ==== Apple POST JWT Apple OpenID Connect Provider uses a `client_secret_post` method where a secret is a JWT produced with a `private_key_jwt` authentication method but with Apple account-specific issuer and subject properties. @@ -879,7 +944,7 @@ quarkus.log.category."io.quarkus.oidc.client.runtime.OidcClientRecorder".min-lev [[oidc-request-filters]] == OIDC request filters -You can filter OIDC requests made by Quarkus to the OIDC provider by registering one or more `OidcRequestFiler` implementations, which can update or add new request headers.
For example, a filter can analyze the request body and add its digest as a new header value: +You can filter OIDC requests made by Quarkus to the OIDC provider by registering one or more `OidcRequestFilter` implementations, which can update or add new request headers. For example, a filter can analyze the request body and add its digest as a new header value: [source,java] ---- @@ -966,8 +1031,9 @@ quarkus.oidc-client.credentials.secret=secret quarkus.oidc-client.grant.type=exchange quarkus.oidc-client.grant-options.exchange.audience=quarkus-app-exchange -quarkus.oidc-token-propagation.exchange-token=true +quarkus.oidc-token-propagation.exchange-token=true <1> ---- +<1> Please note that the `exchange-token` configuration property is ignored when the OidcClient name is set with the `io.quarkus.oidc.token.propagation.AccessToken#exchangeTokenClient` annotation attribute. Note `AccessTokenRequestReactiveFilter` will use `OidcClient` to exchange the current token, and you can use `quarkus.oidc-client.grant-options.exchange` to set the additional exchange properties expected by your OpenID Connect Provider. @@ -986,7 +1052,7 @@ quarkus.oidc-client.scopes=https://graph.microsoft.com/user.read,offline_access quarkus.oidc-token-propagation-reactive.exchange-token=true ---- -`AccessTokenRequestReactiveFilter` uses a default `OidcClient` by default. A named `OidcClient` can be selected with a `quarkus.oidc-token-propagation-reactive.client-name` configuration property. +`AccessTokenRequestReactiveFilter` uses a default `OidcClient` by default. A named `OidcClient` can be selected with a `quarkus.oidc-token-propagation-reactive.client-name` configuration property or with the `io.quarkus.oidc.token.propagation.AccessToken#exchangeTokenClient` annotation attribute. 
[[token-propagation]] == Token Propagation diff --git a/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc b/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc index 5645acfd4a8c7b..36a3fe40541c2e 100644 --- a/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc +++ b/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc @@ -258,7 +258,7 @@ For more information, see xref:security-oidc-bearer-token-authentication.adoc#in [[keycloak-initialization]] === Keycloak initialization -The `quay.io/keycloak/keycloak:23.0.4` image which contains a Keycloak distribution powered by Quarkus is used to start a container by default. +The `quay.io/keycloak/keycloak:23.0.7` image which contains a Keycloak distribution powered by Quarkus is used to start a container by default. `quarkus.keycloak.devservices.image-name` can be used to change the Keycloak image name. For example, set it to `quay.io/keycloak/keycloak:19.0.3-legacy` to use a Keycloak distribution powered by WildFly. Be aware that a Quarkus-based Keycloak distribution is only available starting from Keycloak `20.0.0`. diff --git a/docs/src/main/asciidoc/security-openid-connect-multitenancy.adoc b/docs/src/main/asciidoc/security-openid-connect-multitenancy.adoc index 546e4c0e17c564..b98845ca537e85 100644 --- a/docs/src/main/asciidoc/security-openid-connect-multitenancy.adoc +++ b/docs/src/main/asciidoc/security-openid-connect-multitenancy.adoc @@ -724,6 +724,19 @@ public class HelloResource { ---- <1> The `io.quarkus.oidc.Tenant` annotation must be placed on either the resource class or resource method. +[[TIP]] +In the example above, authentication of the `sayHello` endpoint is enforced with the `@Authenticated` annotation. 
+Alternatively, if you use the xref:security-authorize-web-endpoints-reference.adoc#authorization-using-configuration[HTTP Security policy] +to secure the endpoint, then, for the `@Tenant` annotation to be effective, you must delay this policy's permission check as shown in the example below: +[source,properties] +---- +quarkus.http.auth.permission.authenticated.paths=/api/hello +quarkus.http.auth.permission.authenticated.methods=GET +quarkus.http.auth.permission.authenticated.policy=authenticated +quarkus.http.auth.permission.authenticated.applies-to=JAXRS <1> +---- +<1> Tell Quarkus to run the HTTP permission check after the tenant has been selected with the `@Tenant` annotation. + [[tenant-config-resolver]] == Dynamic tenant configuration resolution diff --git a/docs/src/main/asciidoc/vertx-reference.adoc b/docs/src/main/asciidoc/vertx-reference.adoc index b7956de6f443be..0d940abd968b88 100644 --- a/docs/src/main/asciidoc/vertx-reference.adoc +++ b/docs/src/main/asciidoc/vertx-reference.adoc @@ -27,13 +27,14 @@ With this extension, you can retrieve the managed instance of Vert.x using eithe ---- @ApplicationScoped public class MyBean { -// Field injection -@Inject Vertx vertx; -// Constructor injection -MyBean(Vertx vertx) { - // ... -} + // Field injection + @Inject Vertx vertx; + + // Constructor injection + MyBean(Vertx vertx) { + // ... + } } ---- @@ -83,7 +84,7 @@ Check the associated documentation to learn how to use them. |AMQP Client |`io.quarkus:quarkus-smallrye-reactive-messaging-amqp` (extension) -|xref:amqp.adoc +|xref:amqp.adoc[Getting Started to SmallRye Reactive Messaging with AMQP] |Circuit Breaker |`io.smallrye.reactive:smallrye-mutiny-vertx-circuit-breaker` (external dependency) @@ -95,15 +96,15 @@ Check the associated documentation to learn how to use them.
|DB2 Client |`io.quarkus:quarkus-reactive-db2-client` (extension) -|xref:reactive-sql-clients.adoc +|xref:reactive-sql-clients.adoc[Reactive SQL Clients] |Kafka Client |`io.quarkus:quarkus-smallrye-reactive-messaging-kafka` (extension) -|xref:kafka.adoc +|xref:kafka.adoc[Apache Kafka Reference Guide] |Mail Client |`io.quarkus:quarkus-mailer` (extension) -|xref:mailer.adoc +|xref:mailer.adoc[Sending emails using SMTP] |MQTT Client |`io.quarkus:quarkus-smallrye-reactive-messaging-mqtt` (extension) @@ -111,19 +112,19 @@ Check the associated documentation to learn how to use them. |MS SQL Client |`io.quarkus:quarkus-reactive-mssql-client` (extension) -|xref:reactive-sql-clients.adoc +|xref:reactive-sql-clients.adoc[Reactive SQL Clients] |MySQL Client |`io.quarkus:quarkus-reactive-mysql-client` (extension) -|xref:reactive-sql-clients.adoc +|xref:reactive-sql-clients.adoc[Reactive SQL Clients] |Oracle Client |`io.quarkus:quarkus-reactive-oracle-client` (extension) -|xref:reactive-sql-clients.adoc +|xref:reactive-sql-clients.adoc[Reactive SQL Clients] |PostgreSQL Client |`io.quarkus:quarkus-reactive-pg-client` (extension) -|xref:reactive-sql-clients.adoc +|xref:reactive-sql-clients.adoc[Reactive SQL Clients] |RabbitMQ Client |`io.smallrye.reactive:smallrye-mutiny-vertx-rabbitmq-client` (external dependency) @@ -131,7 +132,7 @@ Check the associated documentation to learn how to use them. 
|Redis Client |`io.quarkus:quarkus-redis-client` (extension) -|xref:redis.adoc +|xref:redis.adoc[Using the Redis Client] |Web Client |`io.smallrye.reactive:smallrye-mutiny-vertx-web-client` (external dependency) diff --git a/docs/src/main/asciidoc/writing-extensions.adoc b/docs/src/main/asciidoc/writing-extensions.adoc index 36f38ed605e3eb..9ce12cb7c48e7a 100644 --- a/docs/src/main/asciidoc/writing-extensions.adoc +++ b/docs/src/main/asciidoc/writing-extensions.adoc @@ -384,8 +384,12 @@ Your extension project should be setup as a multi-module project with two submod Your runtime artifact should depend on `io.quarkus:quarkus-core`, and possibly the runtime artifacts of other Quarkus modules if you want to use functionality provided by them. + Your deployment time module should depend on `io.quarkus:quarkus-core-deployment`, your runtime artifact, -and possibly the deployment artifacts of other Quarkus modules if you want to use functionality provided by them. +and the deployment artifacts of any other Quarkus extensions your own extension depends on. This is essential, otherwise any transitively +pulled in extensions will not provide their full functionality. + +NOTE: The Maven and Gradle plugins will validate this for you and alert you to any deployment artifacts you might have forgotten to add. [WARNING] ==== @@ -573,10 +577,6 @@ dependencies { } ---- -[WARNING] -==== -This plugin is still experimental, it does not validate the extension dependencies as the equivalent Maven plugin does. 
-==== [[build-step-processors]] === Build Step Processors diff --git a/docs/src/main/asciidoc/writing-native-applications-tips.adoc b/docs/src/main/asciidoc/writing-native-applications-tips.adoc index 10e9e43b4a6fcd..2dd53e2c76bd74 100644 --- a/docs/src/main/asciidoc/writing-native-applications-tips.adoc +++ b/docs/src/main/asciidoc/writing-native-applications-tips.adoc @@ -60,7 +60,9 @@ will include: ==== Using a configuration file If globs are not sufficiently precise for your use case and you need to rely on regular expressions, or if you prefer relying on the GraalVM infrastructure, -you can also create a `resource-config.json` (the most common location is within `src/main/resources`) JSON file defining which resources should be included. +you can also create a `resource-config.json` JSON file defining which resources should be included. +Ideally this, and other native image configuration files, should be placed under the `src/main/resources/META-INF/native-image//` folder. +This way they will be automatically parsed by the native build, without additional configuration. [WARNING] ==== @@ -93,60 +95,6 @@ Here we include all the XML files and JSON files into the native executable. For more information about this topic, see the link:https://www.graalvm.org/{graalvm-docs-version}/reference-manual/native-image/dynamic-features/Resources/[GraalVM Accessing Resources in Native Image] guide. 
==== -The final order of business is to make the configuration file known to the `native-image` executable by adding the proper configuration to `application.properties`: - -[source,properties] ----- -quarkus.native.additional-build-args =\ - -H:+UnlockExperimentalVMOptions,\ - -H:ResourceConfigurationFiles=resource-config.json,\ - -H:-UnlockExperimentalVMOptions ----- - -[NOTE] -==== -Starting with Mandrel 23.1 and GraalVM for JDK 21, `-H:ResourceConfigurationFiles=resource-config.json` results in a warning being shown unless wrapped in `-H:+UnlockExperimentalVMOptions` and `-H:-UnlockExperimentalVMOptions`. -The absence of these options will result in build failures in the future. -==== - -In the previous snippet we were able to simply use `resource-config.json` instead of specifying the entire path of the file simply because it was added to `src/main/resources`. -If the file had been added to another directory, the proper file path would have had to be specified manually. - -[TIP] -==== -Multiple options may be separated by a comma. For example, one could use: - -[source,properties] ----- -quarkus.native.additional-build-args =\ - -H:+UnlockExperimentalVMOptions,\ - -H:ResourceConfigurationFiles=resource-config.json,\ - -H:ReflectionConfigurationFiles=reflect-config.json,\ - -H:-UnlockExperimentalVMOptions ----- - -in order to ensure that various resources are included and additional reflection is registered. - -==== -If for some reason adding the aforementioned configuration to `application.properties` is not desirable, it is possible to configure the build tool to effectively perform the same operation. - -When using Maven, we could use the following configuration: - -[source,xml] ----- - - - native - - native - - -H:+UnlockExperimentalVMOptions,-H:ResourceConfigurationFiles=resource-config.json,-H:-UnlockExperimentalVMOptions - - - - ----- - === Registering for reflection When building a native executable, GraalVM operates with a closed world assumption. 
@@ -280,59 +228,9 @@ As an example, in order to register all methods of class `com.acme.MyClass` for For more information about the format of this file, see the link:https://www.graalvm.org/{graalvm-docs-version}/reference-manual/native-image/dynamic-features/Reflection/[GraalVM Reflection in Native Image] guide. ==== -The final order of business is to make the configuration file known to the `native-image` executable by adding the proper configuration to `application.properties`: - -[source,properties] ----- -quarkus.native.additional-build-args =\ - -H:+UnlockExperimentalVMOptions,\ - -H:ReflectionConfigurationFiles=reflect-config.json,\ - -H:-UnlockExperimentalVMOptions ----- - -[NOTE] -==== -Starting with Mandrel 23.1 and GraalVM for JDK 21, `-H:ResourceConfigurationFiles=resource-config.json` results in a warning being shown unless wrapped in `-H:+UnlockExperimentalVMOptions` and `-H:-UnlockExperimentalVMOptions`. -The absence of these options will result in build failures in the future. -==== - -In the previous snippet we were able to simply use `reflect-config.json` instead of specifying the entire path of the file simply because it was added to `src/main/resources`. -If the file had been added to another directory, the proper file path would have had to be specified manually. - -[TIP] -==== -Multiple options may be separated by a comma. For example, one could use: - -[source,properties] ----- -quarkus.native.additional-build-args =\ - -H:+UnlockExperimentalVMOptions,\ - -H:ResourceConfigurationFiles=resource-config.json,\ - -H:ReflectionConfigurationFiles=reflect-config.json,\ - -H:-UnlockExperimentalVMOptions ----- - -in order to ensure that various resources are included and additional reflection is registered. - -==== -If for some reason adding the aforementioned configuration to `application.properties` is not desirable, it is possible to configure the build tool to effectively perform the same operation. 
- -When using Maven, we could use the following configuration: - -[source,xml] ----- - - - native - - native - - -H:+UnlockExperimentalVMOptions,-H:ReflectionConfigurationFiles=reflect-config.json,-H:-UnlockExperimentalVMOptions - - - - ----- +The final order of business is to make the configuration file known to the `native-image` executable. +To do that, place the configuration file under the `src/main/resources/META-INF/native-image//` folder. +This way they will be automatically parsed by the native build, without additional configuration. [[delay-class-init-in-your-app]] === Delaying class initialization diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java index 3fc83fad2d6754..3b8694dcc50252 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java @@ -180,7 +180,7 @@ public AgroalDataSource doCreateDataSource(String dataSourceName, boolean failIf } DataSourceJdbcRuntimeConfig dataSourceJdbcRuntimeConfig = dataSourcesJdbcRuntimeConfig - .getDataSourceJdbcRuntimeConfig(dataSourceName); + .dataSources().get(dataSourceName).jdbc(); if (!dataSourceJdbcRuntimeConfig.url().isPresent()) { //this is not an error situation, because we want to allow the situation where a JDBC extension //is installed but has not been configured diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourcesJdbcBuildTimeConfig.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourcesJdbcBuildTimeConfig.java index ed47c6fbff8885..b8a76bf2158d42 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourcesJdbcBuildTimeConfig.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourcesJdbcBuildTimeConfig.java @@ -4,7 +4,6 @@ 
import io.quarkus.datasource.common.runtime.DataSourceUtil; import io.quarkus.runtime.annotations.ConfigDocMapKey; -import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigGroup; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; @@ -20,7 +19,6 @@ public interface DataSourcesJdbcBuildTimeConfig { /** * Datasources. */ - @ConfigDocSection @ConfigDocMapKey("datasource-name") @WithParentName @WithDefaults diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourcesJdbcRuntimeConfig.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourcesJdbcRuntimeConfig.java index a2b406a2dc6ecb..359a7ccdcebeeb 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourcesJdbcRuntimeConfig.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourcesJdbcRuntimeConfig.java @@ -4,34 +4,29 @@ import io.quarkus.datasource.common.runtime.DataSourceUtil; import io.quarkus.runtime.annotations.ConfigDocMapKey; -import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigGroup; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; import io.smallrye.config.ConfigMapping; import io.smallrye.config.WithDefaults; import io.smallrye.config.WithParentName; +import io.smallrye.config.WithUnnamedKey; @ConfigMapping(prefix = "quarkus.datasource") @ConfigRoot(phase = ConfigPhase.RUN_TIME) public interface DataSourcesJdbcRuntimeConfig { /** - * The default datasource. + * Datasources. */ - DataSourceJdbcRuntimeConfig jdbc(); - - /** - * Additional named datasources. 
- */ - @ConfigDocSection @ConfigDocMapKey("datasource-name") @WithParentName @WithDefaults - Map namedDataSources(); + @WithUnnamedKey(DataSourceUtil.DEFAULT_DATASOURCE_NAME) + Map dataSources(); @ConfigGroup - public interface DataSourceJdbcOuterNamedRuntimeConfig { + interface DataSourceJdbcOuterNamedRuntimeConfig { /** * The JDBC runtime configuration. @@ -39,11 +34,4 @@ public interface DataSourceJdbcOuterNamedRuntimeConfig { DataSourceJdbcRuntimeConfig jdbc(); } - default DataSourceJdbcRuntimeConfig getDataSourceJdbcRuntimeConfig(String dataSourceName) { - if (DataSourceUtil.isDefault(dataSourceName)) { - return jdbc(); - } - - return namedDataSources().get(dataSourceName).jdbc(); - } } diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/health/DataSourceHealthCheck.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/health/DataSourceHealthCheck.java index 7a6d03fee425a1..4a6e3db63be68c 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/health/DataSourceHealthCheck.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/health/DataSourceHealthCheck.java @@ -18,6 +18,7 @@ import org.eclipse.microprofile.health.Readiness; import io.agroal.api.AgroalDataSource; +import io.quarkus.agroal.runtime.AgroalDataSourceSupport; import io.quarkus.agroal.runtime.DataSources; import io.quarkus.arc.Arc; import io.quarkus.datasource.common.runtime.DataSourceUtil; @@ -40,9 +41,10 @@ protected void init() { } DataSourceSupport support = Arc.container().instance(DataSourceSupport.class) .get(); - Set names = support.getConfiguredNames(); + AgroalDataSourceSupport agroalSupport = Arc.container().instance(AgroalDataSourceSupport.class) + .get(); Set excludedNames = support.getInactiveOrHealthCheckExcludedNames(); - for (String name : names) { + for (String name : agroalSupport.entries.keySet()) { if (excludedNames.contains(name)) { continue; } diff --git 
a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/synthetic/SyntheticBeanBuildItemProxyTest.java b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/synthetic/SyntheticBeanBuildItemProxyTest.java index 50275150a21626..6ce4b6c9d54771 100644 --- a/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/synthetic/SyntheticBeanBuildItemProxyTest.java +++ b/extensions/arc/deployment/src/test/java/io/quarkus/arc/test/synthetic/SyntheticBeanBuildItemProxyTest.java @@ -1,10 +1,11 @@ package io.quarkus.arc.test.synthetic; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.lang.reflect.Method; +import java.util.List; import java.util.function.Consumer; import jakarta.enterprise.context.ApplicationScoped; @@ -14,6 +15,7 @@ import org.junit.jupiter.api.extension.RegisterExtension; import io.quarkus.arc.Arc; +import io.quarkus.arc.InstanceHandle; import io.quarkus.arc.deployment.SyntheticBeanBuildItem; import io.quarkus.arc.deployment.SyntheticBeanBuildItem.ExtendedBeanConfigurator; import io.quarkus.builder.BuildChainBuilder; @@ -46,20 +48,31 @@ public void execute(BuildContext context) { // We need to use reflection due to some class loading problems Object recorderProxy = bytecodeRecorder.getRecordingProxy(TestRecorder.class); try { - Method test = recorderProxy.getClass().getDeclaredMethod("test"); - Object proxy = test.invoke(recorderProxy); - ExtendedBeanConfigurator configurator = SyntheticBeanBuildItem.configure(SynthBean.class) + Method test = recorderProxy.getClass().getDeclaredMethod("test", String.class); + + Object proxy1 = test.invoke(recorderProxy, "ok"); + ExtendedBeanConfigurator configurator1 = SyntheticBeanBuildItem.configure(SynthBean.class) .scope(ApplicationScoped.class) + .identifier("ok") .unremovable(); // No creator 
assertThrows(IllegalStateException.class, - () -> configurator.done()); + () -> configurator1.done()); // Not a returned proxy assertThrows(IllegalArgumentException.class, - () -> configurator.runtimeProxy(new SynthBean())); - context.produce(configurator - .runtimeProxy(proxy) + () -> configurator1.runtimeProxy(new SynthBean())); + context.produce(configurator1 + .runtimeProxy(proxy1) + .done()); + + // Register a synthetic bean with same types and qualifiers but different identifier + context.produce(SyntheticBeanBuildItem.configure(SynthBean.class) + .scope(ApplicationScoped.class) + .identifier("nok") + .unremovable() + .runtimeProxy(test.invoke(recorderProxy, "nok")) .done()); + } catch (Exception e) { throw new RuntimeException(e); } @@ -73,9 +86,9 @@ public void execute(BuildContext context) { @Recorder public static class TestRecorder { - public SynthBean test() { + public SynthBean test(String val) { SynthBean bean = new SynthBean(); - bean.setValue("ok"); + bean.setValue(val); return bean; } @@ -83,9 +96,12 @@ public SynthBean test() { @Test public void testBeans() { - SynthBean bean = Arc.container().instance(SynthBean.class).get(); - assertNotNull(bean); - assertEquals("ok", bean.getValue()); + List<InstanceHandle<SynthBean>> beans = Arc.container().listAll(SynthBean.class); + assertEquals(2, beans.size()); + for (InstanceHandle<SynthBean> handle : beans) { + String val = handle.get().getValue(); + assertTrue("ok".equals(val) || "nok".equals(val)); + } } @Vetoed diff --git a/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java b/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java index dde4d70e945c0e..3bfb8bdd2684be 100644 --- a/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java +++ b/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java @@ -3,7 +3,6 @@
import java.util.Optional; import com.microsoft.azure.functions.ExecutionContext; -import com.microsoft.azure.functions.HttpMethod; import com.microsoft.azure.functions.HttpRequestMessage; import com.microsoft.azure.functions.HttpResponseMessage; import com.microsoft.azure.functions.annotation.AuthorizationLevel; @@ -16,10 +15,8 @@ public class Function extends BaseFunction { @FunctionName(QUARKUS_HTTP) public HttpResponseMessage run( - @HttpTrigger(name = "req", dataType = "binary", methods = { HttpMethod.GET, HttpMethod.HEAD, HttpMethod.POST, - HttpMethod.PUT, - HttpMethod.OPTIONS }, route = "{*path}", authLevel = AuthorizationLevel.ANONYMOUS) HttpRequestMessage> request, - final ExecutionContext context) { + @HttpTrigger(name = "req", dataType = "binary", route = "{*path}", authLevel = AuthorizationLevel.ANONYMOUS) HttpRequestMessage> request, + ExecutionContext context) { return dispatch(request); } diff --git a/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibProcessor.java b/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibProcessor.java index 843f5a04802382..90c49d23397805 100644 --- a/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibProcessor.java +++ b/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibProcessor.java @@ -101,6 +101,8 @@ public class JibProcessor { private static final String JAVA_17_BASE_IMAGE = String.format("%s/%s-17-%s:1.18", UBI8_PREFIX, OPENJDK_PREFIX, RUNTIME_SUFFIX); + // The source for this can be found at https://github.com/jboss-container-images/openjdk/blob/ubi8/modules/run/artifacts/opt/jboss/container/java/run/run-java.sh + // A list of env vars that affect this script can be found at 
https://jboss-container-images.github.io/openjdk/ubi8/ubi8-openjdk-17.html private static final String RUN_JAVA_PATH = "/opt/jboss/container/java/run/run-java.sh"; private static final String DEFAULT_BASE_IMAGE_USER = "185"; @@ -455,6 +457,7 @@ private JibContainerBuilder createContainerBuilderFromFastJar(String baseJvmImag // which would mean AppCDS would not be taken into account at all entrypoint = List.of(RUN_JAVA_PATH); envVars.put("JAVA_APP_JAR", workDirInContainer + "/" + JarResultBuildStep.QUARKUS_RUN_JAR); + envVars.put("JAVA_APP_DIR", workDirInContainer.toString()); envVars.put("JAVA_OPTS_APPEND", String.join(" ", determineEffectiveJvmArguments(jibConfig, appCDSResult))); } else { List effectiveJvmArguments = determineEffectiveJvmArguments(jibConfig, appCDSResult); @@ -526,13 +529,14 @@ private JibContainerBuilder createContainerBuilderFromFastJar(String baseJvmImag try { Instant now = Instant.now(); + boolean enforceModificationTime = !jibConfig.useCurrentTimestampFileModification; Instant modificationTime = jibConfig.useCurrentTimestampFileModification ? 
now : Instant.EPOCH; JibContainerBuilder jibContainerBuilder = toJibContainerBuilder(baseJvmImage, jibConfig); if (fastChangingLibPaths.isEmpty()) { // just create a layer with the entire lib structure intact addLayer(jibContainerBuilder, Collections.singletonList(componentsPath.resolve(JarResultBuildStep.LIB)), - workDirInContainer, "fast-jar-lib", isMutableJar, modificationTime); + workDirInContainer, "fast-jar-lib", isMutableJar, enforceModificationTime, modificationTime); } else { // we need to manually create each layer // the idea here is that the fast changing libraries are created in a later layer, thus when they do change, @@ -546,14 +550,9 @@ private JibContainerBuilder createContainerBuilderFromFastJar(String baseJvmImag AbsoluteUnixPath libPathInContainer = workDirInContainer.resolve(JarResultBuildStep.LIB) .resolve(JarResultBuildStep.BOOT_LIB) .resolve(lib.getFileName()); - if (appCDSResult.isPresent()) { - // the boot lib jars need to preserve the modification time because otherwise AppCDS won't work - bootLibsLayerBuilder.addEntry(lib, libPathInContainer, - Files.getLastModifiedTime(lib).toInstant()); - } else { - bootLibsLayerBuilder.addEntry(lib, libPathInContainer); - } - + // the boot lib jars need to preserve the modification time because otherwise AppCDS won't work + bootLibsLayerBuilder.addEntry(lib, libPathInContainer, + Files.getLastModifiedTime(lib).toInstant()); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -566,15 +565,15 @@ private JibContainerBuilder createContainerBuilderFromFastJar(String baseJvmImag .resolve(JarResultBuildStep.DEPLOYMENT_LIB); addLayer(jibContainerBuilder, Collections.singletonList(deploymentPath), workDirInContainer.resolve(JarResultBuildStep.LIB), - "fast-jar-deployment-libs", true, modificationTime); + "fast-jar-deployment-libs", true, enforceModificationTime, modificationTime); } AbsoluteUnixPath libsMainPath = workDirInContainer.resolve(JarResultBuildStep.LIB) 
.resolve(JarResultBuildStep.MAIN); addLayer(jibContainerBuilder, nonFastChangingLibPaths, libsMainPath, "fast-jar-normal-libs", - isMutableJar, modificationTime); + isMutableJar, enforceModificationTime, modificationTime); addLayer(jibContainerBuilder, new ArrayList<>(fastChangingLibPaths), libsMainPath, "fast-jar-changing-libs", - isMutableJar, modificationTime); + isMutableJar, enforceModificationTime, modificationTime); } if (appCDSResult.isPresent()) { @@ -598,9 +597,9 @@ private JibContainerBuilder createContainerBuilderFromFastJar(String baseJvmImag } addLayer(jibContainerBuilder, Collections.singletonList(componentsPath.resolve(JarResultBuildStep.APP)), - workDirInContainer, "fast-jar-quarkus-app", isMutableJar, modificationTime); + workDirInContainer, "fast-jar-quarkus-app", isMutableJar, enforceModificationTime, modificationTime); addLayer(jibContainerBuilder, Collections.singletonList(componentsPath.resolve(JarResultBuildStep.QUARKUS)), - workDirInContainer, "fast-jar-quarkus", isMutableJar, modificationTime); + workDirInContainer, "fast-jar-quarkus", isMutableJar, enforceModificationTime, modificationTime); if (ContainerImageJibConfig.DEFAULT_WORKING_DIR.equals(jibConfig.workingDirectory)) { // this layer ensures that the working directory is writeable // see https://github.com/GoogleContainerTools/jib/issues/1270 @@ -664,7 +663,7 @@ private boolean containsRunJava(String baseJvmImage) { public JibContainerBuilder addLayer(JibContainerBuilder jibContainerBuilder, List files, AbsoluteUnixPath pathInContainer, String name, boolean isMutableJar, - Instant now) + boolean enforceModificationTime, Instant forcedModificationTime) throws IOException { FileEntriesLayer.Builder layerConfigurationBuilder = FileEntriesLayer.builder().setName(name); @@ -672,7 +671,17 @@ public JibContainerBuilder addLayer(JibContainerBuilder jibContainerBuilder, Lis layerConfigurationBuilder.addEntryRecursive( file, pathInContainer.resolve(file.getFileName()), isMutableJar ? 
REMOTE_DEV_FOLDER_PERMISSIONS_PROVIDER : DEFAULT_FILE_PERMISSIONS_PROVIDER, - (sourcePath, destinationPath) -> now, + (sourcePath, destinationPath) -> { + if (enforceModificationTime) { + return forcedModificationTime; + } + + try { + return Files.getLastModifiedTime(sourcePath).toInstant(); + } catch (IOException e) { + throw new RuntimeException("Unable to get last modified time for " + sourcePath, e); + } + }, isMutableJar ? REMOTE_DEV_OWNERSHIP_PROVIDER : DEFAULT_OWNERSHIP_PROVIDER); } diff --git a/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourceRecorder.java b/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourceRecorder.java index 87a39c554f6c3d..3f4b459505df80 100644 --- a/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourceRecorder.java +++ b/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourceRecorder.java @@ -15,20 +15,12 @@ public class DataSourceRecorder { public RuntimeValue createDataSourceSupport( DataSourcesBuildTimeConfig buildTimeConfig, DataSourcesRuntimeConfig runtimeConfig) { - Stream.Builder configured = Stream.builder(); Stream.Builder excludedForHealthChecks = Stream.builder(); for (Map.Entry dataSource : buildTimeConfig.dataSources().entrySet()) { - // TODO this is wrong, as the default datasource could be configured without db-kind being set: - // it's inferred automatically for the default datasource when possible. 
- // See https://github.com/quarkusio/quarkus/issues/37779 - if (dataSource.getValue().dbKind().isPresent()) { - configured.add(dataSource.getKey()); - } if (dataSource.getValue().healthExclude()) { excludedForHealthChecks.add(dataSource.getKey()); } } - Set names = configured.build().collect(toUnmodifiableSet()); Set excludedNames = excludedForHealthChecks.build().collect(toUnmodifiableSet()); Stream.Builder inactive = Stream.builder(); @@ -39,6 +31,6 @@ public RuntimeValue createDataSourceSupport( } Set inactiveNames = inactive.build().collect(toUnmodifiableSet()); - return new RuntimeValue<>(new DataSourceSupport(names, excludedNames, inactiveNames)); + return new RuntimeValue<>(new DataSourceSupport(excludedNames, inactiveNames)); } } diff --git a/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourceSupport.java b/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourceSupport.java index 96b4b0f1fa9a9d..21ea9141a98cea 100644 --- a/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourceSupport.java +++ b/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourceSupport.java @@ -12,26 +12,17 @@ */ public class DataSourceSupport { - private final Set configuredNames; private final Set inactiveNames; private final Set inactiveOrHealthCheckExcludedNames; - public DataSourceSupport(Set configuredNames, Set healthCheckExcludedNames, + public DataSourceSupport(Set healthCheckExcludedNames, Set inactiveNames) { - this.configuredNames = configuredNames; this.inactiveOrHealthCheckExcludedNames = new HashSet<>(); inactiveOrHealthCheckExcludedNames.addAll(inactiveNames); inactiveOrHealthCheckExcludedNames.addAll(healthCheckExcludedNames); this.inactiveNames = inactiveNames; } - // TODO careful when using this, as it might (incorrectly) not include the default datasource. - // See TODO in code that calls the constructor of this class. 
- // See https://github.com/quarkusio/quarkus/issues/37779 - public Set getConfiguredNames() { - return configuredNames; - } - public Set getInactiveNames() { return inactiveNames; } diff --git a/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourcesBuildTimeConfig.java b/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourcesBuildTimeConfig.java index eccf515ebc6c95..8074c5f18a286f 100644 --- a/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourcesBuildTimeConfig.java +++ b/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourcesBuildTimeConfig.java @@ -5,7 +5,6 @@ import io.quarkus.datasource.common.runtime.DataSourceUtil; import io.quarkus.runtime.annotations.ConfigDocMapKey; -import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; import io.smallrye.config.ConfigMapping; @@ -22,7 +21,6 @@ public interface DataSourcesBuildTimeConfig { /** * Datasources. 
*/ - @ConfigDocSection @ConfigDocMapKey("datasource-name") @WithParentName @WithDefaults diff --git a/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourcesRuntimeConfig.java b/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourcesRuntimeConfig.java index 91e24d97721d7e..099b1aa4eaa06f 100644 --- a/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourcesRuntimeConfig.java +++ b/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourcesRuntimeConfig.java @@ -4,7 +4,6 @@ import io.quarkus.datasource.common.runtime.DataSourceUtil; import io.quarkus.runtime.annotations.ConfigDocMapKey; -import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; import io.smallrye.config.ConfigMapping; @@ -19,7 +18,6 @@ public interface DataSourcesRuntimeConfig { /** * Datasources. */ - @ConfigDocSection @ConfigDocMapKey("datasource-name") @WithParentName @WithDefaults diff --git a/extensions/devservices/common/pom.xml b/extensions/devservices/common/pom.xml index ef138c2628b2ff..c4a20804323064 100644 --- a/extensions/devservices/common/pom.xml +++ b/extensions/devservices/common/pom.xml @@ -31,6 +31,14 @@
+ + + commons-codec + commons-codec + io.quarkus quarkus-junit4-mock diff --git a/extensions/devservices/common/src/main/java/io/quarkus/devservices/common/Volumes.java b/extensions/devservices/common/src/main/java/io/quarkus/devservices/common/Volumes.java index 39219be41eed6e..2488bcf52540e4 100644 --- a/extensions/devservices/common/src/main/java/io/quarkus/devservices/common/Volumes.java +++ b/extensions/devservices/common/src/main/java/io/quarkus/devservices/common/Volumes.java @@ -1,6 +1,5 @@ package io.quarkus.devservices.common; -import java.net.URL; import java.util.Map; import org.testcontainers.containers.BindMode; @@ -18,19 +17,12 @@ private Volumes() { public static void addVolumes(GenericContainer container, Map volumes) { for (Map.Entry volume : volumes.entrySet()) { String hostLocation = volume.getKey(); - BindMode bindMode = BindMode.READ_WRITE; if (volume.getKey().startsWith(CLASSPATH)) { - URL url = Thread.currentThread().getContextClassLoader() - .getResource(hostLocation.replaceFirst(CLASSPATH, EMPTY)); - if (url == null) { - throw new IllegalStateException("Classpath resource at '" + hostLocation + "' not found!"); - } - - hostLocation = url.getPath(); - bindMode = BindMode.READ_ONLY; + container.withClasspathResourceMapping(hostLocation.replaceFirst(CLASSPATH, EMPTY), volume.getValue(), + BindMode.READ_ONLY); + } else { + container.withFileSystemBind(hostLocation, volume.getValue(), BindMode.READ_WRITE); } - - container.withFileSystemBind(hostLocation, volume.getValue(), bindMode); } } } diff --git a/extensions/devservices/db2/src/main/java/io/quarkus/devservices/db2/deployment/DB2DevServicesProcessor.java b/extensions/devservices/db2/src/main/java/io/quarkus/devservices/db2/deployment/DB2DevServicesProcessor.java index 6251dc5e96de03..9940177775761a 100644 --- a/extensions/devservices/db2/src/main/java/io/quarkus/devservices/db2/deployment/DB2DevServicesProcessor.java +++ 
b/extensions/devservices/db2/src/main/java/io/quarkus/devservices/db2/deployment/DB2DevServicesProcessor.java @@ -82,7 +82,7 @@ private static class QuarkusDb2Container extends Db2Container { public QuarkusDb2Container(Optional imageName, OptionalInt fixedExposedPort, boolean useSharedNetwork) { super(DockerImageName.parse(imageName.orElseGet(() -> ConfigureUtil.getDefaultImageNameFor("db2"))) - .asCompatibleSubstituteFor(DockerImageName.parse("ibmcom/db2"))); + .asCompatibleSubstituteFor(DockerImageName.parse("icr.io/db2_community/db2"))); this.fixedExposedPort = fixedExposedPort; this.useSharedNetwork = useSharedNetwork; } diff --git a/extensions/elasticsearch-rest-client-common/deployment/src/main/java/io/quarkus/elasticsearch/restclient/common/deployment/DevServicesElasticsearchProcessor.java b/extensions/elasticsearch-rest-client-common/deployment/src/main/java/io/quarkus/elasticsearch/restclient/common/deployment/DevServicesElasticsearchProcessor.java index 8fbbc469848234..eee0fdf9d98acd 100644 --- a/extensions/elasticsearch-rest-client-common/deployment/src/main/java/io/quarkus/elasticsearch/restclient/common/deployment/DevServicesElasticsearchProcessor.java +++ b/extensions/elasticsearch-rest-client-common/deployment/src/main/java/io/quarkus/elasticsearch/restclient/common/deployment/DevServicesElasticsearchProcessor.java @@ -181,29 +181,6 @@ private DevServicesResultBuildItem.RunningDevService startElasticsearch( Distribution resolvedDistribution = resolveDistribution(config, buildItemConfig); DockerImageName resolvedImageName = resolveImageName(config, resolvedDistribution); - // Hibernate Search Elasticsearch have a version configuration property, we need to check that it is coherent - // with the image we are about to launch - if (buildItemConfig.version != null) { - String containerTag = resolvedImageName.getVersionPart(); - if (!containerTag.startsWith(buildItemConfig.version)) { - throw new BuildException( - "Dev Services for Elasticsearch detected a 
version mismatch." - + " Consuming extensions are configured to use version " + config.imageName - + " but Dev Services are configured to use version " + buildItemConfig.version + - ". Either configure the same version or disable Dev Services for Elasticsearch.", - Collections.emptyList()); - } - } - - if (buildItemConfig.distribution != null - && !buildItemConfig.distribution.equals(resolvedDistribution)) { - throw new BuildException( - "Dev Services for Elasticsearch detected a distribution mismatch." - + " Consuming extensions are configured to use distribution " + config.distribution - + " but Dev Services are configured to use distribution " + buildItemConfig.distribution + - ". Either configure the same distribution or disable Dev Services for Elasticsearch.", - Collections.emptyList()); - } final Optional maybeContainerAddress = elasticsearchContainerLocator.locateContainer( config.serviceName, @@ -214,8 +191,8 @@ private DevServicesResultBuildItem.RunningDevService startElasticsearch( final Supplier defaultElasticsearchSupplier = () -> { GenericContainer container = resolvedDistribution.equals(Distribution.ELASTIC) - ? createElasticsearchContainer(config, resolvedImageName) - : createOpensearchContainer(config, resolvedImageName); + ? 
createElasticsearchContainer(config, resolvedImageName, useSharedNetwork) + : createOpensearchContainer(config, resolvedImageName, useSharedNetwork); if (config.serviceName != null) { container.withLabel(DEV_SERVICE_LABEL, config.serviceName); @@ -247,27 +224,31 @@ private DevServicesResultBuildItem.RunningDevService startElasticsearch( } private GenericContainer createElasticsearchContainer(ElasticsearchDevServicesBuildTimeConfig config, - DockerImageName resolvedImageName) { + DockerImageName resolvedImageName, boolean useSharedNetwork) { ElasticsearchContainer container = new ElasticsearchContainer( resolvedImageName.asCompatibleSubstituteFor("docker.elastic.co/elasticsearch/elasticsearch")); - ConfigureUtil.configureSharedNetwork(container, DEV_SERVICE_ELASTICSEARCH); + if (useSharedNetwork) { + ConfigureUtil.configureSharedNetwork(container, DEV_SERVICE_ELASTICSEARCH); + } // Disable security as else we would need to configure it correctly to avoid tons of WARNING in the log container.addEnv("xpack.security.enabled", "false"); // Disable disk-based shard allocation thresholds: // in a single-node setup they just don't make sense, // and lead to problems on large disks with little space left. 
- // See https://www.elastic.co/guide/en/elasticsearch/reference/8.9/modules-cluster.html#disk-based-shard-allocation + // See https://www.elastic.co/guide/en/elasticsearch/reference/8.12/modules-cluster.html#disk-based-shard-allocation container.addEnv("cluster.routing.allocation.disk.threshold_enabled", "false"); container.addEnv("ES_JAVA_OPTS", config.javaOpts); return container; } private GenericContainer createOpensearchContainer(ElasticsearchDevServicesBuildTimeConfig config, - DockerImageName resolvedImageName) { + DockerImageName resolvedImageName, boolean useSharedNetwork) { OpensearchContainer container = new OpensearchContainer( resolvedImageName.asCompatibleSubstituteFor("opensearchproject/opensearch")); - ConfigureUtil.configureSharedNetwork(container, DEV_SERVICE_OPENSEARCH); + if (useSharedNetwork) { + ConfigureUtil.configureSharedNetwork(container, DEV_SERVICE_OPENSEARCH); + } container.addEnv("bootstrap.memory_lock", "true"); container.addEnv("plugins.index_state_management.enabled", "false"); diff --git a/extensions/grpc/deployment/pom.xml b/extensions/grpc/deployment/pom.xml index e5639e2ea0477c..5119c4215af59d 100644 --- a/extensions/grpc/deployment/pom.xml +++ b/extensions/grpc/deployment/pom.xml @@ -101,6 +101,11 @@ quarkus-elytron-security-properties-file-deployment test + + me.escoffier.certs + certificate-generator-junit5 + test + diff --git a/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcClientProcessor.java b/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcClientProcessor.java index 54f481ff16420e..e66fef8f2e7ef4 100644 --- a/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcClientProcessor.java +++ b/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcClientProcessor.java @@ -26,6 +26,7 @@ import org.eclipse.microprofile.config.Config; import org.eclipse.microprofile.config.ConfigProvider; import org.jboss.jandex.AnnotationInstance; +import 
org.jboss.jandex.AnnotationTarget; import org.jboss.jandex.AnnotationTarget.Kind; import org.jboss.jandex.AnnotationValue; import org.jboss.jandex.ClassInfo; @@ -345,16 +346,22 @@ public void transform(TransformationContext ctx) { AnnotationInstance clientAnnotation = Annotations.find(ctx.getQualifiers(), GrpcDotNames.GRPC_CLIENT); if (clientAnnotation != null && clientAnnotation.value() == null) { String clientName = null; + AnnotationTarget annotationTarget = ctx.getTarget(); if (ctx.getTarget().kind() == Kind.FIELD) { clientName = clientAnnotation.target().asField().name(); - } else if (ctx.getTarget().kind() == Kind.METHOD_PARAMETER) { + } else if (ctx.getTarget().kind() == Kind.METHOD + && clientAnnotation.target().kind().equals(Kind.METHOD_PARAMETER)) { MethodParameterInfo param = clientAnnotation.target().asMethodParameter(); + annotationTarget = param; // We don't need to check if parameter names are recorded - that's validated elsewhere clientName = param.method().parameterName(param.position()); } if (clientName != null) { ctx.transform().remove(GrpcDotNames::isGrpcClient) - .add(GrpcDotNames.GRPC_CLIENT, AnnotationValue.createStringValue("value", clientName)).done(); + .add(AnnotationInstance.builder(GrpcDotNames.GRPC_CLIENT) + .value(clientName) + .buildWithTarget(annotationTarget)) + .done(); } } } diff --git a/extensions/grpc/deployment/src/main/resources/dev-ui/qwc-grpc-services.js b/extensions/grpc/deployment/src/main/resources/dev-ui/qwc-grpc-services.js index 27eba4c5a3594e..7a505a4861051d 100644 --- a/extensions/grpc/deployment/src/main/resources/dev-ui/qwc-grpc-services.js +++ b/extensions/grpc/deployment/src/main/resources/dev-ui/qwc-grpc-services.js @@ -2,6 +2,9 @@ import { QwcHotReloadElement, html, css} from 'qwc-hot-reload-element'; import { JsonRpc } from 'jsonrpc'; import { columnBodyRenderer } from '@vaadin/grid/lit.js'; import { gridRowDetailsRenderer } from '@vaadin/grid/lit.js'; +import { observeState } from 'lit-element-state'; 
+import { themeState } from 'theme-state'; +import '@quarkus-webcomponents/codeblock'; import '@vaadin/progress-bar'; import '@vaadin/grid'; import '@vaadin/grid/vaadin-grid-sort-column.js'; @@ -9,14 +12,13 @@ import '@vaadin/vertical-layout'; import '@vaadin/tabs'; import '@vaadin/split-layout'; import 'qui-badge'; -import 'qui-code-block'; import 'qui-ide-link'; import '@vaadin/button'; /** * This component shows the Grpc Services */ -export class QwcGrpcServices extends QwcHotReloadElement { +export class QwcGrpcServices extends observeState(QwcHotReloadElement) { jsonRpc = new JsonRpc(this); streamsMap = new Map(); @@ -214,6 +216,7 @@ export class QwcGrpcServices extends QwcHotReloadElement { mode='json' content='${method.prototype}' value='${method.prototype}' + theme='${themeState.theme.name}' editable> @@ -221,7 +224,8 @@ export class QwcGrpcServices extends QwcHotReloadElement { + content='\n\n\n\n' + theme='${themeState.theme.name}'> `; diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/HelloWorldTlsEndpointTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/HelloWorldTlsEndpointTest.java index d465032c4857c9..ec0acc2e3e07bb 100644 --- a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/HelloWorldTlsEndpointTest.java +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/HelloWorldTlsEndpointTest.java @@ -10,7 +10,11 @@ import io.quarkus.grpc.client.tls.HelloWorldTlsEndpoint; import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; +@Certificates(baseDir = "target/certs", certificates = @Certificate(name = "grpc-client-tls", formats = Format.PEM)) class HelloWorldTlsEndpointTest { @RegisterExtension diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/MtlsWithJKSTrustStoreWithHttpServerTest.java 
b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/MtlsWithJKSTrustStoreWithHttpServerTest.java new file mode 100644 index 00000000000000..0aa557be56e167 --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/MtlsWithJKSTrustStoreWithHttpServerTest.java @@ -0,0 +1,60 @@ +package io.quarkus.grpc.client.tls; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.grpc.examples.helloworld.GreeterGrpc; +import io.grpc.examples.helloworld.HelloReply; +import io.grpc.examples.helloworld.HelloRequest; +import io.quarkus.grpc.GrpcClient; +import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +class MtlsWithJKSTrustStoreWithHttpServerTest { + + private static final String configuration = """ + quarkus.grpc.clients.hello.plain-text=false + quarkus.grpc.clients.hello.tls.trust-certificate-jks.path=target/certs/grpc-client-truststore.jks + quarkus.grpc.clients.hello.tls.trust-certificate-jks.password=password + quarkus.grpc.clients.hello.tls.key-certificate-jks.path=target/certs/grpc-client-keystore.jks + quarkus.grpc.clients.hello.tls.key-certificate-jks.password=password + quarkus.grpc.clients.hello.tls.enabled=true + quarkus.grpc.clients.hello.use-quarkus-grpc-client=true + + quarkus.grpc.server.use-separate-server=false + quarkus.grpc.server.plain-text=false # Force the client to use TLS for the tests + + 
quarkus.http.ssl.certificate.key-store-file=target/certs/grpc-keystore.jks + quarkus.http.ssl.certificate.key-store-password=password + quarkus.http.ssl.certificate.trust-store-file=target/certs/grpc-server-truststore.jks + quarkus.http.ssl.certificate.trust-store-password=password + quarkus.http.ssl.client-auth=REQUIRED + quarkus.http.insecure-requests=disabled + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addPackage(HelloWorldTlsEndpoint.class.getPackage()) + .addPackage(GreeterGrpc.class.getPackage()) + .add(new StringAsset(configuration), "application.properties")); + + @GrpcClient("hello") + GreeterGrpc.GreeterBlockingStub blockingHelloService; + + @Test + void testClientTlsConfiguration() { + HelloReply reply = blockingHelloService.sayHello(HelloRequest.newBuilder().setName("neo").build()); + assertThat(reply.getMessage()).isEqualTo("Hello neo"); + } +} diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/MtlsWithP12TrustStoreWithHttpServerTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/MtlsWithP12TrustStoreWithHttpServerTest.java new file mode 100644 index 00000000000000..458e93ccc47ddb --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/MtlsWithP12TrustStoreWithHttpServerTest.java @@ -0,0 +1,60 @@ +package io.quarkus.grpc.client.tls; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.grpc.examples.helloworld.GreeterGrpc; +import io.grpc.examples.helloworld.HelloReply; +import io.grpc.examples.helloworld.HelloRequest; +import io.quarkus.grpc.GrpcClient; +import 
io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +class MtlsWithP12TrustStoreWithHttpServerTest { + + private static final String configuration = """ + quarkus.grpc.clients.hello.plain-text=false + quarkus.grpc.clients.hello.tls.trust-certificate-p12.path=target/certs/grpc-client-truststore.p12 + quarkus.grpc.clients.hello.tls.trust-certificate-p12.password=password + quarkus.grpc.clients.hello.tls.key-certificate-p12.path=target/certs/grpc-client-keystore.p12 + quarkus.grpc.clients.hello.tls.key-certificate-p12.password=password + quarkus.grpc.clients.hello.tls.enabled=true + quarkus.grpc.clients.hello.use-quarkus-grpc-client=true + + quarkus.grpc.server.use-separate-server=false + quarkus.grpc.server.plain-text=false # Force the client to use TLS for the tests + + quarkus.http.ssl.certificate.key-store-file=target/certs/grpc-keystore.jks + quarkus.http.ssl.certificate.key-store-password=password + quarkus.http.ssl.certificate.trust-store-file=target/certs/grpc-server-truststore.jks + quarkus.http.ssl.certificate.trust-store-password=password + quarkus.http.ssl.client-auth=REQUIRED + quarkus.http.insecure-requests=disabled + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addPackage(HelloWorldTlsEndpoint.class.getPackage()) + .addPackage(GreeterGrpc.class.getPackage()) + .add(new StringAsset(configuration), "application.properties")); + + @GrpcClient("hello") + GreeterGrpc.GreeterBlockingStub blockingHelloService; + + @Test + void testClientTlsConfiguration() { + HelloReply reply = blockingHelloService.sayHello(HelloRequest.newBuilder().setName("neo").build()); + 
assertThat(reply.getMessage()).isEqualTo("Hello neo"); + } +} diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/MtlsWithPemTrustStoreWithHttpServerTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/MtlsWithPemTrustStoreWithHttpServerTest.java new file mode 100644 index 00000000000000..e8a42172f0fffe --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/MtlsWithPemTrustStoreWithHttpServerTest.java @@ -0,0 +1,59 @@ +package io.quarkus.grpc.client.tls; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.grpc.examples.helloworld.GreeterGrpc; +import io.grpc.examples.helloworld.HelloReply; +import io.grpc.examples.helloworld.HelloRequest; +import io.quarkus.grpc.GrpcClient; +import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +class MtlsWithPemTrustStoreWithHttpServerTest { + + private static final String configuration = """ + quarkus.grpc.clients.hello.plain-text=false + quarkus.grpc.clients.hello.tls.trust-certificate-pem.certs=target/certs/grpc-client-ca.crt + quarkus.grpc.clients.hello.tls.key-certificate-pem.certs=target/certs/grpc-client.crt + quarkus.grpc.clients.hello.tls.key-certificate-pem.keys=target/certs/grpc-client.key + quarkus.grpc.clients.hello.tls.enabled=true + quarkus.grpc.clients.hello.use-quarkus-grpc-client=true + + quarkus.grpc.server.use-separate-server=false + 
quarkus.grpc.server.plain-text=false # Force the client to use TLS for the tests + + quarkus.http.ssl.certificate.key-store-file=target/certs/grpc-keystore.jks + quarkus.http.ssl.certificate.key-store-password=password + quarkus.http.ssl.certificate.trust-store-file=target/certs/grpc-server-truststore.jks + quarkus.http.ssl.certificate.trust-store-password=password + quarkus.http.ssl.client-auth=REQUIRED + quarkus.http.insecure-requests=disabled + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addPackage(HelloWorldTlsEndpoint.class.getPackage()) + .addPackage(GreeterGrpc.class.getPackage()) + .add(new StringAsset(configuration), "application.properties")); + + @GrpcClient("hello") + GreeterGrpc.GreeterBlockingStub blockingHelloService; + + @Test + void testClientTlsConfiguration() { + HelloReply reply = blockingHelloService.sayHello(HelloRequest.newBuilder().setName("neo").build()); + assertThat(reply.getMessage()).isEqualTo("Hello neo"); + } +} diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithJKSTrustStoreTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithJKSTrustStoreTest.java new file mode 100644 index 00000000000000..00adb5bbe3bc8e --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithJKSTrustStoreTest.java @@ -0,0 +1,47 @@ +package io.quarkus.grpc.client.tls; + +import static io.restassured.RestAssured.get; +import static org.assertj.core.api.Assertions.assertThat; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import 
me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +class TlsWithJKSTrustStoreTest { + + private static final String configuration = """ + quarkus.grpc.clients.hello.host=localhost + quarkus.grpc.clients.hello.port=9001 + quarkus.grpc.clients.hello.plain-text=false + quarkus.grpc.clients.hello.tls.trust-certificate-jks.path=target/certs/grpc-client-truststore.jks + quarkus.grpc.clients.hello.tls.trust-certificate-jks.password=password + quarkus.grpc.clients.hello.tls.enabled=true + quarkus.grpc.clients.hello.use-quarkus-grpc-client=true + + quarkus.grpc.server.ssl.certificate=target/certs/grpc.crt + quarkus.grpc.server.ssl.key=target/certs/grpc.key + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addPackage(HelloWorldTlsEndpoint.class.getPackage()) + .addPackage(io.grpc.examples.helloworld.GreeterGrpc.class.getPackage()) + .add(new StringAsset(configuration), "application.properties")); + + @Test + void testClientTlsConfiguration() { + String response = get("/hello/blocking/neo").asString(); + assertThat(response).isEqualTo("Hello neo"); + } +} diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithJKSTrustStoreWithHttpServerTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithJKSTrustStoreWithHttpServerTest.java new file mode 100644 index 00000000000000..851349cf67dc23 --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithJKSTrustStoreWithHttpServerTest.java @@ -0,0 +1,55 @@ +package io.quarkus.grpc.client.tls; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import 
org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.grpc.examples.helloworld.GreeterGrpc; +import io.grpc.examples.helloworld.HelloReply; +import io.grpc.examples.helloworld.HelloRequest; +import io.quarkus.grpc.GrpcClient; +import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +class TlsWithJKSTrustStoreWithHttpServerTest { + + private static final String configuration = """ + quarkus.grpc.clients.hello.plain-text=false + quarkus.grpc.clients.hello.tls.trust-certificate-jks.path=target/certs/grpc-client-truststore.jks + quarkus.grpc.clients.hello.tls.trust-certificate-jks.password=password + quarkus.grpc.clients.hello.tls.enabled=true + quarkus.grpc.clients.hello.use-quarkus-grpc-client=true + + quarkus.grpc.server.use-separate-server=false + quarkus.grpc.server.plain-text=false # Force the client to use TLS for the tests + + quarkus.http.ssl.certificate.key-store-file=target/certs/grpc-keystore.jks + quarkus.http.ssl.certificate.key-store-password=password + quarkus.http.insecure-requests=disabled + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addPackage(HelloWorldTlsEndpoint.class.getPackage()) + .addPackage(io.grpc.examples.helloworld.GreeterGrpc.class.getPackage()) + .add(new StringAsset(configuration), "application.properties")); + + @GrpcClient("hello") + GreeterGrpc.GreeterBlockingStub blockingHelloService; + + @Test + void testClientTlsConfiguration() { + HelloReply reply = blockingHelloService.sayHello(HelloRequest.newBuilder().setName("neo").build()); 
+ assertThat(reply.getMessage()).isEqualTo("Hello neo"); + } +} diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithP12TrustStoreTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithP12TrustStoreTest.java new file mode 100644 index 00000000000000..c456376c9e9384 --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithP12TrustStoreTest.java @@ -0,0 +1,47 @@ +package io.quarkus.grpc.client.tls; + +import static io.restassured.RestAssured.get; +import static org.assertj.core.api.Assertions.assertThat; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +class TlsWithP12TrustStoreTest { + + private static final String configuration = """ + quarkus.grpc.clients.hello.host=localhost + quarkus.grpc.clients.hello.port=9001 + quarkus.grpc.clients.hello.plain-text=false + quarkus.grpc.clients.hello.tls.trust-certificate-p12.path=target/certs/grpc-client-truststore.p12 + quarkus.grpc.clients.hello.tls.trust-certificate-p12.password=password + quarkus.grpc.clients.hello.tls.enabled=true + quarkus.grpc.clients.hello.use-quarkus-grpc-client=true + + quarkus.grpc.server.ssl.certificate=target/certs/grpc.crt + quarkus.grpc.server.ssl.key=target/certs/grpc.key + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + 
.addPackage(HelloWorldTlsEndpoint.class.getPackage()) + .addPackage(io.grpc.examples.helloworld.GreeterGrpc.class.getPackage()) + .add(new StringAsset(configuration), "application.properties")); + + @Test + void testClientTlsConfiguration() { + String response = get("/hello/blocking/neo").asString(); + assertThat(response).isEqualTo("Hello neo"); + } +} diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithP12TrustStoreWithHttpServerTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithP12TrustStoreWithHttpServerTest.java new file mode 100644 index 00000000000000..380c04b729d266 --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithP12TrustStoreWithHttpServerTest.java @@ -0,0 +1,55 @@ +package io.quarkus.grpc.client.tls; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.grpc.examples.helloworld.GreeterGrpc; +import io.grpc.examples.helloworld.HelloReply; +import io.grpc.examples.helloworld.HelloRequest; +import io.quarkus.grpc.GrpcClient; +import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +class TlsWithP12TrustStoreWithHttpServerTest { + + private static final String configuration = """ + quarkus.grpc.clients.hello.plain-text=false + quarkus.grpc.clients.hello.tls.trust-certificate-p12.path=target/certs/grpc-client-truststore.p12 + 
quarkus.grpc.clients.hello.tls.trust-certificate-p12.password=password + quarkus.grpc.clients.hello.tls.enabled=true + quarkus.grpc.clients.hello.use-quarkus-grpc-client=true + + quarkus.grpc.server.use-separate-server=false + quarkus.grpc.server.plain-text=false # Force the client to use TLS for the tests + + quarkus.http.ssl.certificate.key-store-file=target/certs/grpc-keystore.jks + quarkus.http.ssl.certificate.key-store-password=password + quarkus.http.insecure-requests=disabled + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addPackage(HelloWorldTlsEndpoint.class.getPackage()) + .addPackage(GreeterGrpc.class.getPackage()) + .add(new StringAsset(configuration), "application.properties")); + + @GrpcClient("hello") + GreeterGrpc.GreeterBlockingStub blockingHelloService; + + @Test + void testClientTlsConfiguration() { + HelloReply reply = blockingHelloService.sayHello(HelloRequest.newBuilder().setName("neo").build()); + assertThat(reply.getMessage()).isEqualTo("Hello neo"); + } +} diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithPemTrustStoreTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithPemTrustStoreTest.java new file mode 100644 index 00000000000000..64a6a2682daf1a --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithPemTrustStoreTest.java @@ -0,0 +1,46 @@ +package io.quarkus.grpc.client.tls; + +import static io.restassured.RestAssured.get; +import static org.assertj.core.api.Assertions.assertThat; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import 
me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +class TlsWithPemTrustStoreTest { + + private static final String configuration = """ + quarkus.grpc.clients.hello.host=localhost + quarkus.grpc.clients.hello.port=9001 + quarkus.grpc.clients.hello.plain-text=false + quarkus.grpc.clients.hello.tls.trust-certificate-pem.certs=target/certs/grpc-client-ca.crt + quarkus.grpc.clients.hello.tls.enabled=true + quarkus.grpc.clients.hello.use-quarkus-grpc-client=true + + quarkus.grpc.server.ssl.certificate=target/certs/grpc.crt + quarkus.grpc.server.ssl.key=target/certs/grpc.key + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addPackage(HelloWorldTlsEndpoint.class.getPackage()) + .addPackage(io.grpc.examples.helloworld.GreeterGrpc.class.getPackage()) + .add(new StringAsset(configuration), "application.properties")); + + @Test + void testClientTlsConfiguration() { + String response = get("/hello/blocking/neo").asString(); + assertThat(response).isEqualTo("Hello neo"); + } +} diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithPemTrustStoreWithHttpServerTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithPemTrustStoreWithHttpServerTest.java new file mode 100644 index 00000000000000..ace79d838275dd --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithPemTrustStoreWithHttpServerTest.java @@ -0,0 +1,54 @@ +package io.quarkus.grpc.client.tls; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import 
org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.grpc.examples.helloworld.GreeterGrpc; +import io.grpc.examples.helloworld.HelloReply; +import io.grpc.examples.helloworld.HelloRequest; +import io.quarkus.grpc.GrpcClient; +import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +class TlsWithPemTrustStoreWithHttpServerTest { + + private static final String configuration = """ + quarkus.grpc.clients.hello.plain-text=false + quarkus.grpc.clients.hello.tls.trust-certificate-pem.certs=target/certs/grpc-client-ca.crt + quarkus.grpc.clients.hello.tls.enabled=true + quarkus.grpc.clients.hello.use-quarkus-grpc-client=true + + quarkus.grpc.server.use-separate-server=false + quarkus.grpc.server.plain-text=false # Force the client to use TLS for the tests + + quarkus.http.ssl.certificate.key-store-file=target/certs/grpc-keystore.jks + quarkus.http.ssl.certificate.key-store-password=password + quarkus.http.insecure-requests=disabled + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addPackage(HelloWorldTlsEndpoint.class.getPackage()) + .addPackage(GreeterGrpc.class.getPackage()) + .add(new StringAsset(configuration), "application.properties")); + + @GrpcClient("hello") + GreeterGrpc.GreeterBlockingStub blockingHelloService; + + @Test + void testClientTlsConfiguration() { + HelloReply reply = blockingHelloService.sayHello(HelloRequest.newBuilder().setName("neo").build()); + assertThat(reply.getMessage()).isEqualTo("Hello neo"); + } +} diff --git 
a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/MutinyGrpcServiceWithSSLTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/MutinyGrpcServiceWithSSLTest.java index edcf342c1f61ca..5b66f4da9fe3d0 100644 --- a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/MutinyGrpcServiceWithSSLTest.java +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/MutinyGrpcServiceWithSSLTest.java @@ -1,5 +1,6 @@ package io.quarkus.grpc.server; +import java.io.File; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; @@ -31,11 +32,16 @@ import io.quarkus.grpc.server.services.MutinyHelloService; import io.quarkus.grpc.server.services.MutinyTestService; import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; /** * Test services exposed by the gRPC server implemented using the regular gRPC model. * Communication uses TLS. 
*/ +@Certificates(baseDir = "target/certs", certificates = @Certificate(name = "grpc-tls", password = "wibble", formats = { + Format.JKS, Format.PEM, Format.PKCS12 })) public class MutinyGrpcServiceWithSSLTest extends GrpcServiceTestBase { @RegisterExtension @@ -54,7 +60,7 @@ public class MutinyGrpcServiceWithSSLTest extends GrpcServiceTestBase { @BeforeEach public void init() throws Exception { SslContext sslcontext = GrpcSslContexts.forClient() - .trustManager(createTrustAllTrustManager()) + .trustManager(new File("target/certs/grpc-tls-ca.crt")) .build(); channel = NettyChannelBuilder.forAddress("localhost", 9001) .sslContext(sslcontext) diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/RegularGrpcServiceWithSSLFromClasspathTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/RegularGrpcServiceWithSSLFromClasspathTest.java index 33c0f848933630..6509bcf88dd44c 100644 --- a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/RegularGrpcServiceWithSSLFromClasspathTest.java +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/RegularGrpcServiceWithSSLFromClasspathTest.java @@ -30,11 +30,16 @@ import io.quarkus.grpc.server.services.HelloService; import io.quarkus.grpc.server.services.TestService; import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; /** * Test services exposed by the gRPC server implemented using the regular gRPC model. * Communication uses TLS and the key is loaded from the classpath. 
*/ +@Certificates(baseDir = "target/certs", certificates = @Certificate(name = "grpc-tls", password = "wibble", formats = { + Format.JKS, Format.PEM, Format.PKCS12 })) public class RegularGrpcServiceWithSSLFromClasspathTest extends GrpcServiceTestBase { @RegisterExtension @@ -46,14 +51,14 @@ public class RegularGrpcServiceWithSSLFromClasspathTest extends GrpcServiceTestB HelloRequestOrBuilder.class, HelloReplyOrBuilder.class, EmptyProtos.class, Messages.class, MutinyTestServiceGrpc.class, TestServiceGrpc.class) - .addAsResource(new File("src/test/resources/tls/server-keystore.jks"), "server-keystore.jks")) + .addAsResource(new File("target/certs/grpc-tls-keystore.jks"), "server-keystore.jks")) .withConfigurationResource("grpc-server-tls-classpath-configuration.properties"); @Override @BeforeEach public void init() throws Exception { SslContext sslcontext = GrpcSslContexts.forClient() - .trustManager(createTrustAllTrustManager()) + .trustManager(new File("target/certs/grpc-tls-ca.crt")) .build(); channel = NettyChannelBuilder.forAddress("localhost", 9001) .sslContext(sslcontext) diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/RegularGrpcServiceWithSSLTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/RegularGrpcServiceWithSSLTest.java index 835b0dd868d703..3b22054f041b3b 100644 --- a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/RegularGrpcServiceWithSSLTest.java +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/RegularGrpcServiceWithSSLTest.java @@ -1,5 +1,6 @@ package io.quarkus.grpc.server; +import java.io.File; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; @@ -29,11 +30,16 @@ import io.quarkus.grpc.server.services.HelloService; import io.quarkus.grpc.server.services.TestService; import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import 
me.escoffier.certs.junit5.Certificates; /** * Test services exposed by the gRPC server implemented using the regular gRPC model. * Communication uses TLS. */ +@Certificates(baseDir = "target/certs", certificates = @Certificate(name = "grpc-tls", password = "wibble", formats = { + Format.JKS, Format.PEM, Format.PKCS12 })) public class RegularGrpcServiceWithSSLTest extends GrpcServiceTestBase { @RegisterExtension @@ -52,7 +58,7 @@ public class RegularGrpcServiceWithSSLTest extends GrpcServiceTestBase { @BeforeEach public void init() throws Exception { SslContext sslcontext = GrpcSslContexts.forClient() - .trustManager(createTrustAllTrustManager()) + .trustManager(new File("target/certs/grpc-tls-ca.crt")) .build(); channel = NettyChannelBuilder.forAddress("localhost", 9001) .sslContext(sslcontext) diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithHttpServerUsingJKSTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithHttpServerUsingJKSTest.java new file mode 100644 index 00000000000000..36fed74b5500e7 --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithHttpServerUsingJKSTest.java @@ -0,0 +1,76 @@ +package io.quarkus.grpc.server.tls; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.io.File; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.grpc.ManagedChannel; +import io.grpc.examples.helloworld.GreeterGrpc; +import io.grpc.examples.helloworld.HelloReply; +import io.grpc.examples.helloworld.HelloRequest; +import io.grpc.netty.GrpcSslContexts; +import io.grpc.netty.NettyChannelBuilder; +import io.netty.handler.ssl.SslContext; +import 
io.quarkus.grpc.server.services.HelloService; +import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +public class TlsWithHttpServerUsingJKSTest { + + static String configuration = """ + quarkus.grpc.server.use-separate-server=false + + quarkus.http.ssl.certificate.key-store-file=target/certs/grpc-keystore.jks + quarkus.http.ssl.certificate.key-store-password=password + quarkus.http.insecure-requests=disabled + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addPackage(GreeterGrpc.class.getPackage()) + .addClass(HelloService.class) + .add(new StringAsset(configuration), "application.properties")); + + protected ManagedChannel channel; + + @BeforeEach + public void init() throws Exception { + File certs = new File("target/certs/grpc-client-ca.crt"); + SslContext sslcontext = GrpcSslContexts.forClient() + .trustManager(certs) + .build(); + channel = NettyChannelBuilder.forAddress("localhost", 8444) + .sslContext(sslcontext) + .useTransportSecurity() + .build(); + } + + @AfterEach + public void shutdown() { + if (channel != null) { + channel.shutdownNow(); + } + } + + @Test + public void testInvokingGrpcServiceUsingTls() { + HelloReply reply = GreeterGrpc.newBlockingStub(channel) + .sayHello(HelloRequest.newBuilder().setName("neo").build()); + assertThat(reply.getMessage()).isEqualTo("Hello neo"); + } + +} diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithHttpServerUsingP12Test.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithHttpServerUsingP12Test.java new file mode 100644 index 
00000000000000..57ee009e332be9 --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithHttpServerUsingP12Test.java @@ -0,0 +1,76 @@ +package io.quarkus.grpc.server.tls; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.io.File; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.grpc.ManagedChannel; +import io.grpc.examples.helloworld.GreeterGrpc; +import io.grpc.examples.helloworld.HelloReply; +import io.grpc.examples.helloworld.HelloRequest; +import io.grpc.netty.GrpcSslContexts; +import io.grpc.netty.NettyChannelBuilder; +import io.netty.handler.ssl.SslContext; +import io.quarkus.grpc.server.services.HelloService; +import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +public class TlsWithHttpServerUsingP12Test { + + static String configuration = """ + quarkus.grpc.server.use-separate-server=false + + quarkus.http.ssl.certificate.key-store-file=target/certs/grpc-keystore.p12 + quarkus.http.ssl.certificate.key-store-password=password + quarkus.http.insecure-requests=disabled + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addPackage(GreeterGrpc.class.getPackage()) + .addClass(HelloService.class) + .add(new StringAsset(configuration), "application.properties")); + + protected ManagedChannel channel; + + 
@BeforeEach + public void init() throws Exception { + File certs = new File("target/certs/grpc-client-ca.crt"); + SslContext sslcontext = GrpcSslContexts.forClient() + .trustManager(certs) + .build(); + channel = NettyChannelBuilder.forAddress("localhost", 8444) + .sslContext(sslcontext) + .useTransportSecurity() + .build(); + } + + @AfterEach + public void shutdown() { + if (channel != null) { + channel.shutdownNow(); + } + } + + @Test + public void testInvokingGrpcServiceUsingTls() { + HelloReply reply = GreeterGrpc.newBlockingStub(channel) + .sayHello(HelloRequest.newBuilder().setName("neo").build()); + assertThat(reply.getMessage()).isEqualTo("Hello neo"); + } + +} diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithHttpServerUsingPemTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithHttpServerUsingPemTest.java new file mode 100644 index 00000000000000..0b1b19cad66dbe --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithHttpServerUsingPemTest.java @@ -0,0 +1,76 @@ +package io.quarkus.grpc.server.tls; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.io.File; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.grpc.ManagedChannel; +import io.grpc.examples.helloworld.GreeterGrpc; +import io.grpc.examples.helloworld.HelloReply; +import io.grpc.examples.helloworld.HelloRequest; +import io.grpc.netty.GrpcSslContexts; +import io.grpc.netty.NettyChannelBuilder; +import io.netty.handler.ssl.SslContext; +import io.quarkus.grpc.server.services.HelloService; +import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import 
me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +public class TlsWithHttpServerUsingPemTest { + + static String configuration = """ + quarkus.grpc.server.use-separate-server=false + + quarkus.http.ssl.certificate.files=target/certs/grpc.crt + quarkus.http.ssl.certificate.key-files=target/certs/grpc.key + quarkus.http.insecure-requests=disabled + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addPackage(GreeterGrpc.class.getPackage()) + .addClass(HelloService.class) + .add(new StringAsset(configuration), "application.properties")); + + protected ManagedChannel channel; + + @BeforeEach + public void init() throws Exception { + File certs = new File("target/certs/grpc-client-ca.crt"); + SslContext sslcontext = GrpcSslContexts.forClient() + .trustManager(certs) + .build(); + channel = NettyChannelBuilder.forAddress("localhost", 8444) + .sslContext(sslcontext) + .useTransportSecurity() + .build(); + } + + @AfterEach + public void shutdown() { + if (channel != null) { + channel.shutdownNow(); + } + } + + @Test + public void testInvokingGrpcServiceUsingTls() { + HelloReply reply = GreeterGrpc.newBlockingStub(channel) + .sayHello(HelloRequest.newBuilder().setName("neo").build()); + assertThat(reply.getMessage()).isEqualTo("Hello neo"); + } + +} diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithJksKeyStoreTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithJksKeyStoreTest.java new file mode 100644 index 00000000000000..17097fff7bed92 --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithJksKeyStoreTest.java @@ -0,0 +1,74 @@ +package 
io.quarkus.grpc.server.tls; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.io.File; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.grpc.*; +import io.grpc.examples.helloworld.GreeterGrpc; +import io.grpc.examples.helloworld.HelloReply; +import io.grpc.examples.helloworld.HelloRequest; +import io.grpc.netty.GrpcSslContexts; +import io.grpc.netty.NettyChannelBuilder; +import io.netty.handler.ssl.SslContext; +import io.quarkus.grpc.server.services.HelloService; +import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +public class TlsWithJksKeyStoreTest { + + static String configuration = """ + quarkus.grpc.server.ssl.key-store=target/certs/grpc-keystore.jks + quarkus.grpc.server.ssl.key-store-password=password + quarkus.grpc.server.alpn=true + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addPackage(GreeterGrpc.class.getPackage()) + .addClass(HelloService.class) + .add(new StringAsset(configuration), "application.properties")); + + protected ManagedChannel channel; + + @BeforeEach + public void init() throws Exception { + File certs = new File("target/certs/grpc-client-ca.crt"); + SslContext sslcontext = GrpcSslContexts.forClient() + .trustManager(certs) + .build(); + channel = NettyChannelBuilder.forAddress("localhost", 9001) + 
.sslContext(sslcontext) + .useTransportSecurity() + .build(); + } + + @AfterEach + public void shutdown() { + if (channel != null) { + channel.shutdownNow(); + } + } + + @Test + public void testInvokingGrpcServiceUsingTls() { + HelloReply reply = GreeterGrpc.newBlockingStub(channel) + .sayHello(HelloRequest.newBuilder().setName("neo").build()); + assertThat(reply.getMessage()).isEqualTo("Hello neo"); + } + +} diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithP12KeyStoreTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithP12KeyStoreTest.java new file mode 100644 index 00000000000000..4a566a941c04a2 --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithP12KeyStoreTest.java @@ -0,0 +1,74 @@ +package io.quarkus.grpc.server.tls; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.io.File; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.grpc.ManagedChannel; +import io.grpc.examples.helloworld.GreeterGrpc; +import io.grpc.examples.helloworld.HelloReply; +import io.grpc.examples.helloworld.HelloRequest; +import io.grpc.netty.GrpcSslContexts; +import io.grpc.netty.NettyChannelBuilder; +import io.netty.handler.ssl.SslContext; +import io.quarkus.grpc.server.services.HelloService; +import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +public class 
TlsWithP12KeyStoreTest { + + static String configuration = """ + quarkus.grpc.server.ssl.key-store=target/certs/grpc-keystore.p12 + quarkus.grpc.server.ssl.key-store-password=password + quarkus.grpc.server.alpn=true + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addPackage(GreeterGrpc.class.getPackage()) + .addClass(HelloService.class) + .add(new StringAsset(configuration), "application.properties")); + + protected ManagedChannel channel; + + @BeforeEach + public void init() throws Exception { + File certs = new File("target/certs/grpc-client-ca.crt"); + SslContext sslcontext = GrpcSslContexts.forClient() + .trustManager(certs) + .build(); + channel = NettyChannelBuilder.forAddress("localhost", 9001) + .sslContext(sslcontext) + .useTransportSecurity() + .build(); + } + + @AfterEach + public void shutdown() { + if (channel != null) { + channel.shutdownNow(); + } + } + + @Test + public void testInvokingGrpcServiceUsingTls() { + HelloReply reply = GreeterGrpc.newBlockingStub(channel) + .sayHello(HelloRequest.newBuilder().setName("neo").build()); + assertThat(reply.getMessage()).isEqualTo("Hello neo"); + } + +} diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithPemKeyStoreTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithPemKeyStoreTest.java new file mode 100644 index 00000000000000..dc19a4b22a1463 --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithPemKeyStoreTest.java @@ -0,0 +1,74 @@ +package io.quarkus.grpc.server.tls; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.io.File; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import 
org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.grpc.ManagedChannel; +import io.grpc.examples.helloworld.GreeterGrpc; +import io.grpc.examples.helloworld.HelloReply; +import io.grpc.examples.helloworld.HelloRequest; +import io.grpc.netty.GrpcSslContexts; +import io.grpc.netty.NettyChannelBuilder; +import io.netty.handler.ssl.SslContext; +import io.quarkus.grpc.server.services.HelloService; +import io.quarkus.test.QuarkusUnitTest; +import me.escoffier.certs.Format; +import me.escoffier.certs.junit5.Certificate; +import me.escoffier.certs.junit5.Certificates; + +@Certificates(baseDir = "target/certs", certificates = { + @Certificate(name = "grpc", password = "password", formats = { Format.JKS, Format.PEM, Format.PKCS12 }, client = true) +}) +public class TlsWithPemKeyStoreTest { + + static String configuration = """ + quarkus.grpc.server.ssl.certificate=target/certs/grpc.crt + quarkus.grpc.server.ssl.key=target/certs/grpc.key + quarkus.grpc.server.alpn=true + """; + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addPackage(GreeterGrpc.class.getPackage()) + .addClass(HelloService.class) + .add(new StringAsset(configuration), "application.properties")); + + protected ManagedChannel channel; + + @BeforeEach + public void init() throws Exception { + File certs = new File("target/certs/grpc-client-ca.crt"); + SslContext sslcontext = GrpcSslContexts.forClient() + .trustManager(certs) + .build(); + channel = NettyChannelBuilder.forAddress("localhost", 9001) + .sslContext(sslcontext) + .useTransportSecurity() + .build(); + } + + @AfterEach + public void shutdown() { + if (channel != null) { + channel.shutdownNow(); + } + } + + @Test + public void testInvokingGrpcServiceUsingTls() { + HelloReply reply = GreeterGrpc.newBlockingStub(channel) + .sayHello(HelloRequest.newBuilder().setName("neo").build()); + 
assertThat(reply.getMessage()).isEqualTo("Hello neo"); + } + +} diff --git a/extensions/grpc/deployment/src/test/resources/grpc-client-tls-configuration.properties b/extensions/grpc/deployment/src/test/resources/grpc-client-tls-configuration.properties index c393ab7d3fd1ef..ba7bb700876412 100644 --- a/extensions/grpc/deployment/src/test/resources/grpc-client-tls-configuration.properties +++ b/extensions/grpc/deployment/src/test/resources/grpc-client-tls-configuration.properties @@ -1,6 +1,6 @@ quarkus.grpc.clients.hello.host=localhost quarkus.grpc.clients.hello.port=9001 -quarkus.grpc.clients.hello.ssl.trust-store=src/test/resources/tls-from-file/ca.pem +quarkus.grpc.clients.hello.ssl.trust-store=target/certs/grpc-client-tls-ca.crt -quarkus.grpc.server.ssl.certificate=src/test/resources/tls-from-file/server.pem -quarkus.grpc.server.ssl.key=src/test/resources/tls-from-file/server.key \ No newline at end of file +quarkus.grpc.server.ssl.certificate=target/certs/grpc-client-tls.crt +quarkus.grpc.server.ssl.key=target/certs/grpc-client-tls.key \ No newline at end of file diff --git a/extensions/grpc/deployment/src/test/resources/grpc-server-tls-configuration.properties b/extensions/grpc/deployment/src/test/resources/grpc-server-tls-configuration.properties index b7d56ade9f0152..57700bf9e33c83 100644 --- a/extensions/grpc/deployment/src/test/resources/grpc-server-tls-configuration.properties +++ b/extensions/grpc/deployment/src/test/resources/grpc-server-tls-configuration.properties @@ -1,3 +1,3 @@ -quarkus.grpc.server.ssl.key-store=src/test/resources/tls/server-keystore.jks +quarkus.grpc.server.ssl.key-store=target/certs/grpc-tls-keystore.jks quarkus.grpc.server.ssl.key-store-password=wibble quarkus.grpc.server.alpn=true diff --git a/extensions/grpc/deployment/src/test/resources/tls-from-file/README.md b/extensions/grpc/deployment/src/test/resources/tls-from-file/README.md deleted file mode 100644 index 8c1886e98b5e4d..00000000000000 --- 
a/extensions/grpc/deployment/src/test/resources/tls-from-file/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# Generating the certificates and keys - -The ca is self-signed: ----------------------- - -```bash -openssl req -x509 -new -newkey rsa:2048 -nodes -keyout ca.key -out ca.pem \ - -config ca-openssl.cnf -days 3650 -extensions v3_req -``` - -When prompted for certificate information, everything is default. - -Client is issued by CA: ------------------------ - -```bash -openssl genrsa -out client.key.rsa 2048 -openssl pkcs8 -topk8 -in client.key.rsa -out client.key -nocrypt -openssl req -new -key client.key -out client.csr -``` - -When prompted for certificate information, everything is default except the -common name which is set to `testclient`. - -```bash -openssl x509 -req -CA ca.pem -CAkey ca.key -CAcreateserial -in client.csr \ - -out client.pem -days 3650 -``` - -server is issued by CA with a special config for subject alternative names: ----------------------------------------------------------------------------- - -```bash -openssl genrsa -out server1.key.rsa 2048 -openssl pkcs8 -topk8 -in server.key.rsa -out server.key -nocrypt -openssl req -new -key server.key -out server.csr -config server-openssl.cnf -``` - -When prompted for certificate information, everything is default except the -common name which is set to `localhost`. 
- -```bash -openssl x509 -req -CA ca.pem -CAkey ca.key -CAcreateserial -in server.csr \ - -out server.pem -extfile server-openssl.cnf -days 3650 -``` - -Cleanup -------- - -```bash -rm *.rsa -rm *.csr -rm ca.srl -``` \ No newline at end of file diff --git a/extensions/grpc/deployment/src/test/resources/tls-from-file/ca-openssl.cnf b/extensions/grpc/deployment/src/test/resources/tls-from-file/ca-openssl.cnf deleted file mode 100644 index 7a8528ec2304a1..00000000000000 --- a/extensions/grpc/deployment/src/test/resources/tls-from-file/ca-openssl.cnf +++ /dev/null @@ -1,17 +0,0 @@ -[req] -distinguished_name = req_distinguished_name -req_extensions = v3_req - -[req_distinguished_name] -countryName = Country Name (2 letter code) -countryName_default = FR -stateOrProvinceName = State or Province Name (full name) -stateOrProvinceName_default = Some-State -organizationName = Organization Name (eg, company) -organizationName_default = Acme Corp -commonName = Common Name (eg, YOUR name) -commonName_default = testca - -[v3_req] -basicConstraints = CA:true -keyUsage = critical, keyCertSign \ No newline at end of file diff --git a/extensions/grpc/deployment/src/test/resources/tls-from-file/ca.pem b/extensions/grpc/deployment/src/test/resources/tls-from-file/ca.pem deleted file mode 100644 index b3c622bc96b47c..00000000000000 --- a/extensions/grpc/deployment/src/test/resources/tls-from-file/ca.pem +++ /dev/null @@ -1,20 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDMTCCAhmgAwIBAgIJAKH9BBbnY/fjMA0GCSqGSIb3DQEBCwUAMEcxCzAJBgNV -BAYTAkZSMRMwEQYDVQQIDApTb21lLVN0YXRlMRIwEAYDVQQKDAlBY21lIENvcnAx -DzANBgNVBAMMBnRlc3RjYTAeFw0yMDA0MDQwNzI5NTdaFw0zMDA0MDIwNzI5NTda -MEcxCzAJBgNVBAYTAkZSMRMwEQYDVQQIDApTb21lLVN0YXRlMRIwEAYDVQQKDAlB -Y21lIENvcnAxDzANBgNVBAMMBnRlc3RjYTCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAJqJz93r5o5q+yr2zX97CZaW+6pqVhdf8x73x+/E+9fIML96pvUJ -VQJSMVV+dOHqb860inw9WE7itckTlArB5vpVfjGunbCGOxNTgaUBMJeaalXgFUkL -36mFpGhwXVaUVKdrCSbol3R/eKAc6qcN4g/plKgSnGtODjxd1za64OhZn3Cz8tUN 
-yxmnGlFkOU7S3F8YF2aTmIjZZcs7JF93VmLyVzDkWzlySmKqXXt1xpzFADCMzHjR -GuUaOilC9dstvfZHNhQv2TIBYHwltShYV/86J+p5RXlRkaHxfnqKgPenw82SehWs -2eGkgLFnUp1Q9nTwIB4l7NmEyJzzTg0xMj8CAwEAAaMgMB4wDAYDVR0TBAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAgQwDQYJKoZIhvcNAQELBQADggEBAAxVmtqKlPaXOIDv -ZHaLLt0TdhukEqf13nMPSb6sxA0eOD3GNR3CUBMfftVRkEVjvqZRbYjJTnG9IW8E -39tESrLPnoWEYdvDJBM4sTsG2JLVpaq6mNcMQABkAIjnbjq0yXK+WwP6Ug5HD7Ds -r8hbiu5T039v0uHxJOM/+nhkyyHMsAYaAnUKQ9F0fbo8Jp/d5KK70k5hMpb2smr1 -cYVkN9AZlBgOeciFfO1RrWGsnTo3Aln6wciSXPqqj42t8WZJp1s11Lqcml1zJqHe -dQkI9NjPZ67V0D0mytvIixchClMCl9E9AkXjGdapE1lQ3s6TK5t0COpo0VE4KAvd -cXRzvCI= ------END CERTIFICATE----- diff --git a/extensions/grpc/deployment/src/test/resources/tls-from-file/server-openssl.cnf b/extensions/grpc/deployment/src/test/resources/tls-from-file/server-openssl.cnf deleted file mode 100644 index 3962a52d790d4b..00000000000000 --- a/extensions/grpc/deployment/src/test/resources/tls-from-file/server-openssl.cnf +++ /dev/null @@ -1,80 +0,0 @@ -[req] -distinguished_name = req_distinguished_name -req_extensions = v3_req - -[req_distinguished_name] -countryName = Country Name (2 letter code) -countryName_default = FR -stateOrProvinceName = State or Province Name (full name) -stateOrProvinceName_default = Some-State -localityName = Locality Name (eg, city) -localityName_default = Valence -organizationName = Organization Name (eg, company) -organizationName_default = Acme Corp -commonName = Common Name (eg, YOUR name) -commonName_max = 64 - -#################################################################### -[ ca ] -default_ca = CA_default # The default ca section - -#################################################################### -[ CA_default ] - -dir = . # Where everything is kept -certs = $dir # Where the issued certs are kept -crl_dir = $dir # Where the issued crl are kept -database = $dir/index.txt # database index file. -new_certs_dir = $dir # default place for new certs. 
- -certificate = $dir/ca.pem # The CA certificate -serial = $dir/serial # The current serial number -crlnumber = $dir/crlnumber # the current crl number - # must be commented out to leave a V1 CRL -crl = $dir/crl.pem # The current CRL -private_key = $dir/private/cakey.pem# The private key -RANDFILE = $dir/private/.rand # private random number file - -x509_extensions = usr_cert - -# Comment out the following two lines for the "traditional" -# (and highly broken) format. -name_opt = ca_default # Subject Name options -cert_opt = ca_default # Certificate field options - -# Extension copying option: use with caution. -# copy_extensions = copy - -# Extensions to add to a CRL. Note: Netscape communicator chokes on V2 CRLs -# so this is commented out by default to leave a V1 CRL. -# crlnumber must also be commented out to leave a V1 CRL. -# crl_extensions = crl_ext - -default_days = 365 # how long to certify for -default_crl_days= 30 # how long before next CRL -default_md = default # use public key default MD -preserve = no # keep passed DN ordering - -# A few difference way of specifying how similar the request should look -# For type CA, the listed attributes must be the same, and the optional -# and supplied fields are just that :-) -policy = policy_anything -[ policy_anything ] -countryName = optional -stateOrProvinceName = optional -localityName = optional -organizationName = optional -organizationalUnitName = optional -commonName = supplied -emailAddress = optional - -[v3_req] -basicConstraints = CA:FALSE -keyUsage = nonRepudiation, digitalSignature, keyEncipherment -subjectAltName = @alt_names - -[alt_names] -DNS.1 = localhost -DNS.2 = *.test.com -IP.1 = "127.0.0.1" -IP.2 = "192.168.1.3" \ No newline at end of file diff --git a/extensions/grpc/deployment/src/test/resources/tls-from-file/server.key b/extensions/grpc/deployment/src/test/resources/tls-from-file/server.key deleted file mode 100644 index b49d98c926e61c..00000000000000 --- 
a/extensions/grpc/deployment/src/test/resources/tls-from-file/server.key +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCy40A8Keuwa6aK -NRxqa5nbrNpQnWF1hd9kO848UzUI06X9GLve459Q4KCNUDgUA1PL6zUaVbFpmLrp -6iBimxr1u+HqVyYkha4veLXKA1+nLcp0gUOQTb4Dk6jHdGhgwIaPo8saFO9sY2Vy -+xh8MU8cepjZtxgt9cI/+RGQgg0uEz7NT3tjlX3b7Y7SQChv2fsA6ZuE/3v2XBLg -zCgNEfdYh46B0pkE3Od3xq+cieOOuFHCeHFX+wZcCJWHa7tLxsgKIp+QohqG4MoF -KWCf7484nu8EYd7YzNdzPN6XmcQuw2IUr2YjDFaY0EZ6ERTkSRoMBbvjq9bGPYix -kPHwQnOFAgMBAAECggEBAJP7yJy8tRvpwgidLRegUdRXZva/aus0xvt9OfvPfZUC -uVLpzijxtk7KtCaS0QFFS2Hq/q/9admIHj/5jbbkxuW3+ojIdWZLLDBbNE+cgNmk -2NGOSZ0rouAEm9/8HYjEW8yh3BeEcBgDFd6Ld9LtW5uck6uvepIytvIDEhOwckTF -BtrtJubHIAjeTFfdikcNuQbQHwWDWu8ykRG7gXyEGXJjWXV+lRZqY+JtBm8TpF2H -6pj1DwPcBt0yt7LoXrrfIlMabqdWXgmfu+j5kvxyZNWOg56G48hfFIg3e8dri/7/ -xHI8t3dnN/L67VbHMor3xMYmeGQQnq9Zkv5tcj+c40ECgYEA2Q5svQA66l6ubpJo -sSKfBIskIe/WuzWB0YmV0jdg0mqivG/bxObK0sZr6Jcd4T3v5J3PMit0G3gOfhPO -sD6otk/Lo9d7whOfskjG3kDODrx+mRVHhVfamUcqzk722OrDGh9NeCHaCbPBAhMd -tmugi+SF1eSUPtfAwqW9o0qP7NUCgYEA0vu1gOxFZPbIdThNIGyzudn1+FSpEc67 -kiB/FJnculpPf9fRaL//dMG+sItUwFvSTJ+xYf+mRcSXl6imY7zSTgakjAp+mzMZ -LPZeMY9qZxEgIIiLcX/WVi6Ldveukh9Mbzd6v+dfqZJEBiA5EDfyjdVl6APRmybv -3VR/rHoGA/ECgYEA12TMHWY8ENXyTUG26IkNukmFirPhqBd6AwCAj4Jq74PoiAyM -z0Gj5jQHY2GKwjlfdMPxl7ytVJD3+L8ZLaaQb5KR573vTvGAWUCFMIqosND25FzM -g5NiFxcbcG3F4g5dm++SRfN51oTttGxZ4Ou+/vPAqDhTsGUUIVSt8nwMhR0CgYAM -iTQxothEtX0Xqe67PHo5UsAQr0cUbcorVo72dGXvFKqgl/wzUyUklNZ1uvGgNFR8 -hQiPIBeLEbFIK5cnWfLM/AwO3hjDs/eM+l6CZ1kVIqlcBYDzj3r2x/E1cmYG/KEY -Ap3ihSbj9nLgQk3hrtFUqBdT/9YWd+vMpNapMt7koQKBgBNzxUtQOKcT3EaJytfe -LqVayA6mfhjZisUtw1ufg9iPUDyGo8Wj3h5QI0aY9240Bztp/f/qWRF0vCcA6mNm -U4edOI1EjWqvgkLw0Nav3xSNO4jDchA/A9Lj2y6mjGmmaj5E17vowRz1s3GHDtIX -lpwir++lc7qUgPNttzfK/ltI ------END PRIVATE KEY----- diff --git a/extensions/grpc/deployment/src/test/resources/tls-from-file/server.pem b/extensions/grpc/deployment/src/test/resources/tls-from-file/server.pem 
deleted file mode 100644 index 0564aead544636..00000000000000 --- a/extensions/grpc/deployment/src/test/resources/tls-from-file/server.pem +++ /dev/null @@ -1,19 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDJDCCAgygAwIBAgIJAKhxPs8iIgiBMA0GCSqGSIb3DQEBBQUAMEcxCzAJBgNV -BAYTAkZSMRMwEQYDVQQIDApTb21lLVN0YXRlMRIwEAYDVQQKDAlBY21lIENvcnAx -DzANBgNVBAMMBnRlc3RjYTAeFw0yMDA0MDQwODEzMjhaFw0zMDA0MDIwODEzMjha -MFwxCzAJBgNVBAYTAkZSMRMwEQYDVQQIDApTb21lLVN0YXRlMRAwDgYDVQQHDAdW -YWxlbmNlMRIwEAYDVQQKDAlBY21lIENvcnAxEjAQBgNVBAMMCWxvY2FsaG9zdDCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALLjQDwp67Brpoo1HGprmdus -2lCdYXWF32Q7zjxTNQjTpf0Yu97jn1DgoI1QOBQDU8vrNRpVsWmYuunqIGKbGvW7 -4epXJiSFri94tcoDX6ctynSBQ5BNvgOTqMd0aGDAho+jyxoU72xjZXL7GHwxTxx6 -mNm3GC31wj/5EZCCDS4TPs1Pe2OVfdvtjtJAKG/Z+wDpm4T/e/ZcEuDMKA0R91iH -joHSmQTc53fGr5yJ4464UcJ4cVf7BlwIlYdru0vGyAoin5CiGobgygUpYJ/vjzie -7wRh3tjM13M83peZxC7DYhSvZiMMVpjQRnoRFORJGgwFu+Or1sY9iLGQ8fBCc4UC -AwEAATANBgkqhkiG9w0BAQUFAAOCAQEAFEYYp3wV1xH9+Z44GJSjTa9Ltg5gxqmf -t7ROsiy01vPuWQDhLILRpGQHdyz07AOmYsaGRTMOCcOsUCDY7u0NYi991HBEn1ZI -/+2tFNZnLTk/hLtNSW30MWt0sNE2d6DIMZQBAtvrWnFdrQAThzTQLeFpbi+nTX+O -KwtVpci8gMej90fEnyYRdNn/yTmjo79Q4+yuD9oKhvsOxy65CmSvXh9oLpqH7HzQ -TR6LfuWSpG6wGKy1JW5fXuwb0YJC1QjvAOVWr9zm79wyeXUuXfQ6A2SNHTPwwEbu -K+kV+UX7NofWhepi1QOxKnnO3EXbdN7fl44jEuzaGOKtmfhiRse4cQ== ------END CERTIFICATE----- diff --git a/extensions/grpc/deployment/src/test/resources/tls/client-truststore.jks b/extensions/grpc/deployment/src/test/resources/tls/client-truststore.jks deleted file mode 100644 index fbb345f0cd3bb9..00000000000000 Binary files a/extensions/grpc/deployment/src/test/resources/tls/client-truststore.jks and /dev/null differ diff --git a/extensions/grpc/deployment/src/test/resources/tls/server-keystore.jks b/extensions/grpc/deployment/src/test/resources/tls/server-keystore.jks deleted file mode 100644 index 8e68758e08cbce..00000000000000 Binary files a/extensions/grpc/deployment/src/test/resources/tls/server-keystore.jks and /dev/null differ diff 
--git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcServerRecorder.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcServerRecorder.java index f11186d3a30a65..2f95cf0248e87d 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcServerRecorder.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcServerRecorder.java @@ -186,7 +186,7 @@ private void buildGrpcServer(Vertx vertx, GrpcServerConfiguration configuration, initHealthStorage(); - LOGGER.info("Starting new Vert.x gRPC server ..."); + LOGGER.info("Starting new Quarkus gRPC server (using Vert.x transport)..."); Route route = routerSupplier.getValue().route().handler(ctx -> { if (!isGrpc(ctx)) { ctx.next(); diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcClientConfiguration.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcClientConfiguration.java index 16f1a0540f4090..80a9092fcb07b9 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcClientConfiguration.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcClientConfiguration.java @@ -60,9 +60,16 @@ public class GrpcClientConfiguration { /** * The SSL/TLS config. + * Only use this if you want to use the old Java gRPC client. */ public SslClientConfig ssl; + /** + * The TLS config. + * Only use this if you want to use the Quarkus gRPC client. + */ + public TlsClientConfig tls; + /** * Use a name resolver. Defaults to dns. 
* If set to "stork", host will be treated as SmallRye Stork service name diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/TlsClientConfig.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/TlsClientConfig.java new file mode 100644 index 00000000000000..a48f9786b6c214 --- /dev/null +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/TlsClientConfig.java @@ -0,0 +1,132 @@ +package io.quarkus.grpc.runtime.config; + +import java.util.List; +import java.util.Optional; + +import io.quarkus.runtime.annotations.ConfigGroup; +import io.quarkus.runtime.annotations.ConfigItem; + +@ConfigGroup +public class TlsClientConfig { + + /** + * Whether SSL/TLS is enabled. + */ + @ConfigItem(defaultValue = "false") + public boolean enabled; + + /** + * Enable trusting all certificates. Disabled by default. + */ + @ConfigItem(defaultValue = "false") + public boolean trustAll; + + /** + * Trust configuration in the PEM format. + *

+ * When used, {@code trust-certificate-jks} and {@code trust-certificate-p12} must not be used. + */ + public PemTrustCertConfiguration trustCertificatePem; + + /** + * Trust configuration in the JKS format. + *

+ * When configured, {@code trust-certificate-pem} and {@code trust-certificate-p12} must not be used. + */ + public JksConfiguration trustCertificateJks; + + /** + * Trust configuration in the P12 format. + *

+ * When configured, {@code trust-certificate-jks} and {@code trust-certificate-pem} must not be used. + */ + public PfxConfiguration trustCertificateP12; + + /** + * Key/cert configuration in the PEM format. + *

+ * When configured, {@code key-certificate-jks} and {@code key-certificate-p12} must not be used. + */ + public PemKeyCertConfiguration keyCertificatePem; + + /** + * Key/cert configuration in the JKS format. + *

+ * When configured, {@code #key-certificate-pem} and {@code #key-certificate-p12} must not be used. + */ + public JksConfiguration keyCertificateJks; + + /** + * Key/cert configuration in the P12 format. + *

+ * When configured, {@code key-certificate-jks} and {@code #key-certificate-pem} must not be used. + */ + public PfxConfiguration keyCertificateP12; + + /** + * Whether hostname should be verified in the SSL/TLS handshake. + */ + @ConfigItem(defaultValue = "true") + public boolean verifyHostname; + + @ConfigGroup + public static class PemTrustCertConfiguration { + + /** + * Comma-separated list of the trust certificate files (Pem format). + */ + @ConfigItem + public Optional> certs; + + } + + @ConfigGroup + public static class JksConfiguration { + + /** + * Path of the key file (JKS format). + */ + @ConfigItem + public Optional path; + + /** + * Password of the key file. + */ + @ConfigItem + public Optional password; + } + + @ConfigGroup + public static class PfxConfiguration { + + /** + * Path to the key file (PFX format). + */ + @ConfigItem + public Optional path; + + /** + * Password of the key. + */ + @ConfigItem + public Optional password; + } + + @ConfigGroup + public static class PemKeyCertConfiguration { + + /** + * Comma-separated list of the path to the key files (Pem format). + */ + @ConfigItem + public Optional> keys; + + /** + * Comma-separated list of the path to the certificate files (Pem format). 
+ */ + @ConfigItem + public Optional> certs; + + } + +} diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/Channels.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/Channels.java index 65831169c0b036..aafa71bdbb8807 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/Channels.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/Channels.java @@ -5,6 +5,12 @@ import static io.grpc.netty.NettyChannelBuilder.DEFAULT_FLOW_CONTROL_WINDOW; import static io.quarkus.grpc.runtime.GrpcTestPortUtils.testPort; import static io.quarkus.grpc.runtime.config.GrpcClientConfiguration.DNS; +import static io.quarkus.grpc.runtime.supports.SSLConfigHelper.configureJksKeyCertOptions; +import static io.quarkus.grpc.runtime.supports.SSLConfigHelper.configureJksTrustOptions; +import static io.quarkus.grpc.runtime.supports.SSLConfigHelper.configurePemKeyCertOptions; +import static io.quarkus.grpc.runtime.supports.SSLConfigHelper.configurePemTrustOptions; +import static io.quarkus.grpc.runtime.supports.SSLConfigHelper.configurePfxKeyCertOptions; +import static io.quarkus.grpc.runtime.supports.SSLConfigHelper.configurePfxTrustOptions; import java.io.IOException; import java.io.InputStream; @@ -54,6 +60,7 @@ import io.quarkus.grpc.runtime.config.GrpcClientConfiguration; import io.quarkus.grpc.runtime.config.GrpcServerConfiguration; import io.quarkus.grpc.runtime.config.SslClientConfig; +import io.quarkus.grpc.runtime.config.TlsClientConfig; import io.quarkus.grpc.runtime.stork.StorkGrpcChannel; import io.quarkus.grpc.runtime.stork.StorkMeasuringGrpcInterceptor; import io.quarkus.grpc.runtime.stork.VertxStorkMeasuringGrpcInterceptor; @@ -250,7 +257,8 @@ public static Channel createChannel(String name, Set perClientIntercepto return builder.build(); } else { - HttpClientOptions options = new HttpClientOptions(); // TODO options + // Vert.x client + HttpClientOptions options = 
new HttpClientOptions(); options.setHttp2ClearTextUpgrade(false); // this fixes i30379 if (!plainText) { @@ -258,20 +266,34 @@ public static Channel createChannel(String name, Set perClientIntercepto options.setSsl(true); options.setUseAlpn(true); - if (config.ssl.trustStore.isPresent()) { - Optional trustStorePath = config.ssl.trustStore; - if (trustStorePath.isPresent()) { - PemTrustOptions to = new PemTrustOptions(); - to.addCertValue(bufferFor(trustStorePath.get(), "trust store")); - options.setTrustOptions(to); - } - Optional certificatePath = config.ssl.certificate; - Optional keyPath = config.ssl.key; - if (certificatePath.isPresent() && keyPath.isPresent()) { - PemKeyCertOptions cko = new PemKeyCertOptions(); - cko.setCertValue(bufferFor(certificatePath.get(), "certificate")); - cko.setKeyValue(bufferFor(keyPath.get(), "key")); - options.setKeyCertOptions(cko); + TlsClientConfig tls = config.tls; + if (tls.enabled) { + options.setSsl(true).setTrustAll(tls.trustAll); + + configurePemTrustOptions(options, tls.trustCertificatePem); + configureJksTrustOptions(options, tls.trustCertificateJks); + configurePfxTrustOptions(options, tls.trustCertificateP12); + + configurePemKeyCertOptions(options, tls.keyCertificatePem); + configureJksKeyCertOptions(options, tls.keyCertificateJks); + configurePfxKeyCertOptions(options, tls.keyCertificateP12); + options.setVerifyHost(tls.verifyHostname); + } else { + if (config.ssl.trustStore.isPresent()) { + Optional trustStorePath = config.ssl.trustStore; + if (trustStorePath.isPresent()) { + PemTrustOptions to = new PemTrustOptions(); + to.addCertValue(bufferFor(trustStorePath.get(), "trust store")); + options.setTrustOptions(to); + } + Optional certificatePath = config.ssl.certificate; + Optional keyPath = config.ssl.key; + if (certificatePath.isPresent() && keyPath.isPresent()) { + PemKeyCertOptions cko = new PemKeyCertOptions(); + cko.setCertValue(bufferFor(certificatePath.get(), "certificate")); + 
cko.setKeyValue(bufferFor(keyPath.get(), "key")); + options.setKeyCertOptions(cko); + } } } } @@ -309,7 +331,7 @@ public static Channel createChannel(String name, Set perClientIntercepto interceptors.addAll(interceptorContainer.getSortedPerServiceInterceptors(perClientInterceptors)); interceptors.addAll(interceptorContainer.getSortedGlobalInterceptors()); - LOGGER.info("Creating Vert.x gRPC channel ..."); + LOGGER.debug("Creating Vert.x gRPC channel ..."); return new InternalGrpcChannel(client, channel, ClientInterceptors.intercept(channel, interceptors)); } diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/SSLConfigHelper.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/SSLConfigHelper.java new file mode 100644 index 00000000000000..c8e1f7e6cdbd74 --- /dev/null +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/SSLConfigHelper.java @@ -0,0 +1,120 @@ +package io.quarkus.grpc.runtime.supports; + +import io.quarkus.grpc.runtime.config.TlsClientConfig; +import io.vertx.core.net.JksOptions; +import io.vertx.core.net.KeyCertOptions; +import io.vertx.core.net.PemKeyCertOptions; +import io.vertx.core.net.PemTrustOptions; +import io.vertx.core.net.PfxOptions; +import io.vertx.core.net.TCPSSLOptions; + +public class SSLConfigHelper { + + public static void configurePemTrustOptions(TCPSSLOptions options, + TlsClientConfig.PemTrustCertConfiguration configuration) { + if ((configuration.certs.isPresent() && !configuration.certs.get().isEmpty())) { + ensureTrustOptionsNotSet(options); + options.setTrustOptions(toPemTrustOptions(configuration)); + } + } + + private static PemTrustOptions toPemTrustOptions(TlsClientConfig.PemTrustCertConfiguration configuration) { + PemTrustOptions pemTrustOptions = new PemTrustOptions(); + if (configuration.certs.isPresent()) { + for (String cert : configuration.certs.get()) { + pemTrustOptions.addCertPath(cert); + } + } + return pemTrustOptions; + } + + 
public static void configureJksTrustOptions(TCPSSLOptions options, TlsClientConfig.JksConfiguration configuration) { + if (configuration.path.isPresent()) { + ensureTrustOptionsNotSet(options); + options.setTrustOptions(toJksOptions(configuration)); + } + } + + private static JksOptions toJksOptions(TlsClientConfig.JksConfiguration configuration) { + JksOptions jksOptions = new JksOptions(); + if (configuration.path.isPresent()) { + jksOptions.setPath(configuration.path.get()); + } + if (configuration.password.isPresent()) { + jksOptions.setPassword(configuration.password.get()); + } + return jksOptions; + } + + public static void configurePfxTrustOptions(TCPSSLOptions options, TlsClientConfig.PfxConfiguration configuration) { + if (configuration.path.isPresent()) { + ensureTrustOptionsNotSet(options); + options.setTrustOptions(toPfxOptions(configuration)); + } + } + + private static PfxOptions toPfxOptions(TlsClientConfig.PfxConfiguration configuration) { + PfxOptions pfxOptions = new PfxOptions(); + if (configuration.path.isPresent()) { + pfxOptions.setPath(configuration.path.get()); + } + if (configuration.password.isPresent()) { + pfxOptions.setPassword(configuration.password.get()); + } + return pfxOptions; + } + + private static void ensureTrustOptionsNotSet(TCPSSLOptions options) { + if (options.getTrustOptions() != null) { + throw new IllegalArgumentException("Trust options have already been set"); + } + } + + public static void configurePemKeyCertOptions(TCPSSLOptions options, + TlsClientConfig.PemKeyCertConfiguration configuration) { + if (configuration.certs.isPresent() && !configuration.certs.get().isEmpty() && configuration.keys.isPresent() + && !configuration.keys.get().isEmpty()) { + ensureKeyCertOptionsNotSet(options); + options.setKeyCertOptions(toPemKeyCertOptions(configuration)); + } + } + + private static KeyCertOptions toPemKeyCertOptions(TlsClientConfig.PemKeyCertConfiguration configuration) { + PemKeyCertOptions pemKeyCertOptions = new 
PemKeyCertOptions(); + if (configuration.certs.isPresent()) { + for (String cert : configuration.certs.get()) { + pemKeyCertOptions.addCertPath(cert); + } + } + if (configuration.keys.isPresent()) { + for (String cert : configuration.keys.get()) { + pemKeyCertOptions.addKeyPath(cert); + } + } + return pemKeyCertOptions; + } + + public static void configureJksKeyCertOptions(TCPSSLOptions options, TlsClientConfig.JksConfiguration configuration) { + if (configuration.path.isPresent()) { + ensureKeyCertOptionsNotSet(options); + options.setKeyCertOptions(toJksOptions(configuration)); + } + } + + public static void configurePfxKeyCertOptions(TCPSSLOptions options, TlsClientConfig.PfxConfiguration configuration) { + if (configuration.path.isPresent()) { + ensureKeyCertOptionsNotSet(options); + options.setKeyCertOptions(toPfxOptions(configuration)); + } + } + + private static void ensureKeyCertOptionsNotSet(TCPSSLOptions options) { + if (options.getKeyCertOptions() != null) { + throw new IllegalArgumentException("Key cert options have already been set"); + } + } + + private SSLConfigHelper() { + // Utility + } +} \ No newline at end of file diff --git a/extensions/hibernate-envers/deployment/src/main/java/io/quarkus/hibernate/envers/deployment/HibernateEnversDisabledProcessor.java b/extensions/hibernate-envers/deployment/src/main/java/io/quarkus/hibernate/envers/deployment/HibernateEnversDisabledProcessor.java index b2894c347c577f..6a9f0c05165418 100644 --- a/extensions/hibernate-envers/deployment/src/main/java/io/quarkus/hibernate/envers/deployment/HibernateEnversDisabledProcessor.java +++ b/extensions/hibernate-envers/deployment/src/main/java/io/quarkus/hibernate/envers/deployment/HibernateEnversDisabledProcessor.java @@ -37,9 +37,9 @@ public void disableHibernateEnversStaticInit(HibernateEnversRecorder recorder, // TODO move this to runtime init once we implement in Hibernate ORM a way // to remove entity types from the metamodel on runtime init public void 
checkNoExplicitActiveTrue(HibernateEnversBuildTimeConfig buildTimeConfig) { - for (var entry : buildTimeConfig.getAllPersistenceUnitConfigsAsMap().entrySet()) { + for (var entry : buildTimeConfig.persistenceUnits().entrySet()) { var config = entry.getValue(); - if (config.active.isPresent() && config.active.get()) { + if (config.active().isPresent() && config.active().get()) { var puName = entry.getKey(); String enabledPropertyKey = HibernateEnversBuildTimeConfig.extensionPropertyKey("enabled"); String activePropertyKey = HibernateEnversBuildTimeConfig.persistenceUnitPropertyKey(puName, "active"); diff --git a/extensions/hibernate-envers/deployment/src/main/java/io/quarkus/hibernate/envers/deployment/HibernateEnversEnabled.java b/extensions/hibernate-envers/deployment/src/main/java/io/quarkus/hibernate/envers/deployment/HibernateEnversEnabled.java index 5451c0d2828e3c..035673f2b66eb2 100644 --- a/extensions/hibernate-envers/deployment/src/main/java/io/quarkus/hibernate/envers/deployment/HibernateEnversEnabled.java +++ b/extensions/hibernate-envers/deployment/src/main/java/io/quarkus/hibernate/envers/deployment/HibernateEnversEnabled.java @@ -18,7 +18,7 @@ public class HibernateEnversEnabled implements BooleanSupplier { @Override public boolean getAsBoolean() { - return config.enabled; + return config.enabled(); } } diff --git a/extensions/hibernate-envers/deployment/src/main/java/io/quarkus/hibernate/envers/deployment/HibernateEnversProcessor.java b/extensions/hibernate-envers/deployment/src/main/java/io/quarkus/hibernate/envers/deployment/HibernateEnversProcessor.java index a3b4d43cd46a43..d4f135cbee5c73 100644 --- a/extensions/hibernate-envers/deployment/src/main/java/io/quarkus/hibernate/envers/deployment/HibernateEnversProcessor.java +++ b/extensions/hibernate-envers/deployment/src/main/java/io/quarkus/hibernate/envers/deployment/HibernateEnversProcessor.java @@ -40,10 +40,10 @@ public void registerEnversReflections(BuildProducer re 
.produce(ReflectiveClassBuildItem.builder("org.hibernate.envers.DefaultTrackingModifiedEntitiesRevisionEntity") .methods().build()); - for (HibernateEnversBuildTimeConfigPersistenceUnit pu : buildTimeConfig.getAllPersistenceUnitConfigsAsMap().values()) { - pu.revisionListener.ifPresent( + for (HibernateEnversBuildTimeConfigPersistenceUnit pu : buildTimeConfig.persistenceUnits().values()) { + pu.revisionListener().ifPresent( s -> reflectiveClass.produce(ReflectiveClassBuildItem.builder(s).methods().fields().build())); - pu.auditStrategy.ifPresent( + pu.auditStrategy().ifPresent( s -> reflectiveClass.produce(ReflectiveClassBuildItem.builder(s).methods().fields().build())); } } diff --git a/extensions/hibernate-envers/runtime/src/main/java/io/quarkus/hibernate/envers/HibernateEnversBuildTimeConfig.java b/extensions/hibernate-envers/runtime/src/main/java/io/quarkus/hibernate/envers/HibernateEnversBuildTimeConfig.java index 5c48492b5508e2..e88e1c94d6758b 100644 --- a/extensions/hibernate-envers/runtime/src/main/java/io/quarkus/hibernate/envers/HibernateEnversBuildTimeConfig.java +++ b/extensions/hibernate-envers/runtime/src/main/java/io/quarkus/hibernate/envers/HibernateEnversBuildTimeConfig.java @@ -1,17 +1,19 @@ package io.quarkus.hibernate.envers; import java.util.Map; -import java.util.TreeMap; import io.quarkus.hibernate.orm.runtime.PersistenceUnitUtil; import io.quarkus.runtime.annotations.ConfigDocMapKey; -import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithParentName; +import io.smallrye.config.WithUnnamedKey; +@ConfigMapping(prefix = "quarkus.hibernate-envers") @ConfigRoot(phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) -public class HibernateEnversBuildTimeConfig { +public interface 
HibernateEnversBuildTimeConfig { /** * Whether Hibernate Envers is enabled during the build. * @@ -23,37 +25,22 @@ public class HibernateEnversBuildTimeConfig { * * @asciidoclet */ - @ConfigItem(defaultValue = "true") - public boolean enabled; + @WithDefault("true") + boolean enabled(); /** - * Configuration for the default persistence unit. + * Configuration for persistence units. */ - @ConfigItem(name = ConfigItem.PARENT) - public HibernateEnversBuildTimeConfigPersistenceUnit defaultPersistenceUnit; - - /** - * Configuration for additional named persistence units. - */ - @ConfigDocSection + @WithParentName + @WithUnnamedKey(PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME) @ConfigDocMapKey("persistence-unit-name") - @ConfigItem(name = ConfigItem.PARENT) - public Map persistenceUnits; - - public Map getAllPersistenceUnitConfigsAsMap() { - Map map = new TreeMap<>(); - if (defaultPersistenceUnit != null) { - map.put(PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME, defaultPersistenceUnit); - } - map.putAll(persistenceUnits); - return map; - } + Map persistenceUnits(); - public static String extensionPropertyKey(String radical) { + static String extensionPropertyKey(String radical) { return "quarkus.hibernate-envers." 
+ radical; } - public static String persistenceUnitPropertyKey(String persistenceUnitName, String radical) { + static String persistenceUnitPropertyKey(String persistenceUnitName, String radical) { StringBuilder keyBuilder = new StringBuilder("quarkus.hibernate-envers."); if (!PersistenceUnitUtil.isDefaultPersistenceUnit(persistenceUnitName)) { keyBuilder.append("\"").append(persistenceUnitName).append("\"."); diff --git a/extensions/hibernate-envers/runtime/src/main/java/io/quarkus/hibernate/envers/HibernateEnversBuildTimeConfigPersistenceUnit.java b/extensions/hibernate-envers/runtime/src/main/java/io/quarkus/hibernate/envers/HibernateEnversBuildTimeConfigPersistenceUnit.java index 8645657bd5037c..bdf85ee85615f2 100644 --- a/extensions/hibernate-envers/runtime/src/main/java/io/quarkus/hibernate/envers/HibernateEnversBuildTimeConfigPersistenceUnit.java +++ b/extensions/hibernate-envers/runtime/src/main/java/io/quarkus/hibernate/envers/HibernateEnversBuildTimeConfigPersistenceUnit.java @@ -2,11 +2,12 @@ import java.util.Optional; +import io.quarkus.runtime.annotations.ConfigDocDefault; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; @ConfigGroup -public class HibernateEnversBuildTimeConfigPersistenceUnit { +public interface HibernateEnversBuildTimeConfigPersistenceUnit { /** * Whether Hibernate Envers should be active for this persistence unit at runtime. @@ -24,164 +25,163 @@ public class HibernateEnversBuildTimeConfigPersistenceUnit { * * @asciidoclet */ - @ConfigItem(defaultValueDocumentation = "'true' if Hibernate ORM is enabled; 'false' otherwise") - public Optional active = Optional.empty(); + @ConfigDocDefault("'true' if Hibernate ORM is enabled; 'false' otherwise") + Optional active(); /** * Enable store_data_at_delete feature. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#STORE_DATA_AT_DELETE}. 
*/ - @ConfigItem(defaultValue = "false") - public boolean storeDataAtDelete; + @WithDefault("false") + boolean storeDataAtDelete(); /** * Defines a suffix for historical data table. Defaults to {@literal _AUD}. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#AUDIT_TABLE_SUFFIX}. */ - @ConfigItem(defaultValue = "_AUD") - public Optional auditTableSuffix; + @WithDefault("_AUD") + Optional auditTableSuffix(); /** * Defines a prefix for historical data table. Default is the empty string. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#AUDIT_TABLE_PREFIX}. */ - @ConfigItem(defaultValue = "") - public Optional auditTablePrefix; + @WithDefault("") + Optional auditTablePrefix(); /** * Revision field name. Defaults to {@literal REV}. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#REVISION_FIELD_NAME}. */ - @ConfigItem(defaultValue = "REV") - public Optional revisionFieldName; + @WithDefault("REV") + Optional revisionFieldName(); /** * Revision type field name. Defaults to {@literal REVTYPE}. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#REVISION_TYPE_FIELD_NAME}. */ - @ConfigItem(defaultValue = "REVTYPE") - public Optional revisionTypeFieldName; + @WithDefault("REVTYPE") + Optional revisionTypeFieldName(); /** * Enable the revision_on_collection_change feature. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#REVISION_ON_COLLECTION_CHANGE}. */ - @ConfigItem(defaultValue = "true") - public boolean revisionOnCollectionChange; + @WithDefault("true") + boolean revisionOnCollectionChange(); /** * Enable the do_not_audit_optimistic_locking_field feature. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#DO_NOT_AUDIT_OPTIMISTIC_LOCKING_FIELD}. 
*/ - @ConfigItem(defaultValue = "true") - public boolean doNotAuditOptimisticLockingField; + @WithDefault("true") + boolean doNotAuditOptimisticLockingField(); /** * Defines the default schema of where audit tables are to be created. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#DEFAULT_SCHEMA}. */ - @ConfigItem(defaultValue = "") - public Optional defaultSchema; + @WithDefault("") + Optional defaultSchema(); /** * Defines the default catalog of where audit tables are to be created. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#DEFAULT_CATALOG}. */ - @ConfigItem(defaultValue = "") - public Optional defaultCatalog; + @WithDefault("") + Optional defaultCatalog(); /** * Enables the track_entities_changed_in_revision feature. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#TRACK_ENTITIES_CHANGED_IN_REVISION}. */ - @ConfigItem(defaultValue = "false") - public boolean trackEntitiesChangedInRevision; + @WithDefault("false") + boolean trackEntitiesChangedInRevision(); /** * Enables the use_revision_entity_with_native_id feature. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#USE_REVISION_ENTITY_WITH_NATIVE_ID}. */ - @ConfigItem(defaultValue = "true") - public boolean useRevisionEntityWithNativeId; + @WithDefault("true") + boolean useRevisionEntityWithNativeId(); /** * Enables the global_with_modified_flag feature. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#GLOBAL_WITH_MODIFIED_FLAG}. */ - @ConfigItem(defaultValue = "false") - public boolean globalWithModifiedFlag; + @WithDefault("false") + boolean globalWithModifiedFlag(); /** * Defines the suffix to be used for modified flag columns. Defaults to {@literal _MOD}. 
* Maps to {@link org.hibernate.envers.configuration.EnversSettings#MODIFIED_FLAG_SUFFIX} */ - @ConfigItem(defaultValue = "_MOD") - public Optional modifiedFlagSuffix; + @WithDefault("_MOD") + Optional modifiedFlagSuffix(); /** * Defines the fully qualified class name of a user defined revision listener. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#REVISION_LISTENER}. */ - @ConfigItem - public Optional revisionListener; + Optional revisionListener(); /** * Defines the fully qualified class name of the audit strategy to be used. * * Maps to {@link org.hibernate.envers.configuration.EnversSettings#AUDIT_STRATEGY}. */ - @ConfigItem(defaultValue = "org.hibernate.envers.strategy.DefaultAuditStrategy") - public Optional auditStrategy; + @WithDefault("org.hibernate.envers.strategy.DefaultAuditStrategy") + Optional auditStrategy(); /** * Defines the property name for the audit entity's composite primary key. Defaults to {@literal originalId}. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#ORIGINAL_ID_PROP_NAME}. */ - @ConfigItem(defaultValue = "originalId") - public Optional originalIdPropName; + @WithDefault("originalId") + Optional originalIdPropName(); /** * Defines the column name that holds the end revision number in audit entities. Defaults to {@literal REVEND}. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#AUDIT_STRATEGY_VALIDITY_END_REV_FIELD_NAME}. */ - @ConfigItem(defaultValue = "REVEND") - public Optional auditStrategyValidityEndRevFieldName; + @WithDefault("REVEND") + Optional auditStrategyValidityEndRevFieldName(); /** * Enables the audit_strategy_validity_store_revend_timestamp feature. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#AUDIT_STRATEGY_VALIDITY_STORE_REVEND_TIMESTAMP}. 
*/ - @ConfigItem(defaultValue = "false") - public boolean auditStrategyValidityStoreRevendTimestamp; + @WithDefault("false") + boolean auditStrategyValidityStoreRevendTimestamp(); /** * Defines the column name of the revision end timestamp in the audit tables. Defaults to {@literal REVEND_TSTMP}. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#AUDIT_STRATEGY_VALIDITY_REVEND_TIMESTAMP_FIELD_NAME}. */ - @ConfigItem(defaultValue = "REVEND_TSTMP") - public Optional auditStrategyValidityRevendTimestampFieldName; + @WithDefault("REVEND_TSTMP") + Optional auditStrategyValidityRevendTimestampFieldName(); /** * Defines the name of the column used for storing collection ordinal values for embeddable elements. * Defaults to {@literal SETORDINAL}. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#EMBEDDABLE_SET_ORDINAL_FIELD_NAME}. */ - @ConfigItem(defaultValue = "SETORDINAL") - public Optional embeddableSetOrdinalFieldName; + @WithDefault("SETORDINAL") + Optional embeddableSetOrdinalFieldName(); /** * Enables the allow_identifier_reuse feature. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#ALLOW_IDENTIFIER_REUSE}. */ - @ConfigItem(defaultValue = "false") - public boolean allowIdentifierReuse; + @WithDefault("false") + boolean allowIdentifierReuse(); /** * Defines the naming strategy to be used for modified columns. * Defaults to {@literal org.hibernate.envers.boot.internal.LegacyModifiedColumnNamingStrategy}. * Maps to {@link org.hibernate.envers.configuration.EnversSettings#MODIFIED_COLUMN_NAMING_STRATEGY}. 
*/ - @ConfigItem(defaultValue = "org.hibernate.envers.boot.internal.LegacyModifiedColumnNamingStrategy") - public Optional modifiedColumnNamingStrategy; + @WithDefault("org.hibernate.envers.boot.internal.LegacyModifiedColumnNamingStrategy") + Optional modifiedColumnNamingStrategy(); } diff --git a/extensions/hibernate-envers/runtime/src/main/java/io/quarkus/hibernate/envers/HibernateEnversRecorder.java b/extensions/hibernate-envers/runtime/src/main/java/io/quarkus/hibernate/envers/HibernateEnversRecorder.java index 6bcc24e51088fa..3c32373213baed 100644 --- a/extensions/hibernate-envers/runtime/src/main/java/io/quarkus/hibernate/envers/HibernateEnversRecorder.java +++ b/extensions/hibernate-envers/runtime/src/main/java/io/quarkus/hibernate/envers/HibernateEnversRecorder.java @@ -31,48 +31,48 @@ private HibernateEnversIntegrationStaticInitListener(HibernateEnversBuildTimeCon @Override public void contributeBootProperties(BiConsumer propertyCollector) { - var puConfig = buildTimeConfig.getAllPersistenceUnitConfigsAsMap().get(puName); + var puConfig = buildTimeConfig.persistenceUnits().get(puName); if (puConfig == null) { // Leave Envers unconfigured, but still activate it. return; } - if (puConfig.active.isPresent() && !puConfig.active.get()) { + if (puConfig.active().isPresent() && !puConfig.active().get()) { propertyCollector.accept(EnversService.INTEGRATION_ENABLED, "false"); // Do not process other properties: Hibernate Envers is inactive anyway. 
return; } - addConfig(propertyCollector, EnversSettings.STORE_DATA_AT_DELETE, puConfig.storeDataAtDelete); - addConfig(propertyCollector, EnversSettings.AUDIT_TABLE_SUFFIX, puConfig.auditTableSuffix); - addConfig(propertyCollector, EnversSettings.AUDIT_TABLE_PREFIX, puConfig.auditTablePrefix); - addConfig(propertyCollector, EnversSettings.REVISION_FIELD_NAME, puConfig.revisionFieldName); - addConfig(propertyCollector, EnversSettings.REVISION_TYPE_FIELD_NAME, puConfig.revisionTypeFieldName); + addConfig(propertyCollector, EnversSettings.STORE_DATA_AT_DELETE, puConfig.storeDataAtDelete()); + addConfig(propertyCollector, EnversSettings.AUDIT_TABLE_SUFFIX, puConfig.auditTableSuffix()); + addConfig(propertyCollector, EnversSettings.AUDIT_TABLE_PREFIX, puConfig.auditTablePrefix()); + addConfig(propertyCollector, EnversSettings.REVISION_FIELD_NAME, puConfig.revisionFieldName()); + addConfig(propertyCollector, EnversSettings.REVISION_TYPE_FIELD_NAME, puConfig.revisionTypeFieldName()); addConfig(propertyCollector, EnversSettings.REVISION_ON_COLLECTION_CHANGE, - puConfig.revisionOnCollectionChange); + puConfig.revisionOnCollectionChange()); addConfig(propertyCollector, EnversSettings.DO_NOT_AUDIT_OPTIMISTIC_LOCKING_FIELD, - puConfig.doNotAuditOptimisticLockingField); - addConfig(propertyCollector, EnversSettings.DEFAULT_SCHEMA, puConfig.defaultSchema); - addConfig(propertyCollector, EnversSettings.DEFAULT_CATALOG, puConfig.defaultCatalog); + puConfig.doNotAuditOptimisticLockingField()); + addConfig(propertyCollector, EnversSettings.DEFAULT_SCHEMA, puConfig.defaultSchema()); + addConfig(propertyCollector, EnversSettings.DEFAULT_CATALOG, puConfig.defaultCatalog()); addConfig(propertyCollector, EnversSettings.TRACK_ENTITIES_CHANGED_IN_REVISION, - puConfig.trackEntitiesChangedInRevision); + puConfig.trackEntitiesChangedInRevision()); addConfig(propertyCollector, EnversSettings.USE_REVISION_ENTITY_WITH_NATIVE_ID, - puConfig.useRevisionEntityWithNativeId); - 
addConfig(propertyCollector, EnversSettings.GLOBAL_WITH_MODIFIED_FLAG, puConfig.globalWithModifiedFlag); - addConfig(propertyCollector, EnversSettings.MODIFIED_FLAG_SUFFIX, puConfig.modifiedFlagSuffix); - addConfigIfPresent(propertyCollector, EnversSettings.REVISION_LISTENER, puConfig.revisionListener); - addConfigIfPresent(propertyCollector, EnversSettings.AUDIT_STRATEGY, puConfig.auditStrategy); - addConfigIfPresent(propertyCollector, EnversSettings.ORIGINAL_ID_PROP_NAME, puConfig.originalIdPropName); + puConfig.useRevisionEntityWithNativeId()); + addConfig(propertyCollector, EnversSettings.GLOBAL_WITH_MODIFIED_FLAG, puConfig.globalWithModifiedFlag()); + addConfig(propertyCollector, EnversSettings.MODIFIED_FLAG_SUFFIX, puConfig.modifiedFlagSuffix()); + addConfigIfPresent(propertyCollector, EnversSettings.REVISION_LISTENER, puConfig.revisionListener()); + addConfigIfPresent(propertyCollector, EnversSettings.AUDIT_STRATEGY, puConfig.auditStrategy()); + addConfigIfPresent(propertyCollector, EnversSettings.ORIGINAL_ID_PROP_NAME, puConfig.originalIdPropName()); addConfigIfPresent(propertyCollector, EnversSettings.AUDIT_STRATEGY_VALIDITY_END_REV_FIELD_NAME, - puConfig.auditStrategyValidityEndRevFieldName); + puConfig.auditStrategyValidityEndRevFieldName()); addConfig(propertyCollector, EnversSettings.AUDIT_STRATEGY_VALIDITY_STORE_REVEND_TIMESTAMP, - puConfig.auditStrategyValidityStoreRevendTimestamp); + puConfig.auditStrategyValidityStoreRevendTimestamp()); addConfigIfPresent(propertyCollector, EnversSettings.AUDIT_STRATEGY_VALIDITY_REVEND_TIMESTAMP_FIELD_NAME, - puConfig.auditStrategyValidityRevendTimestampFieldName); + puConfig.auditStrategyValidityRevendTimestampFieldName()); addConfigIfPresent(propertyCollector, EnversSettings.EMBEDDABLE_SET_ORDINAL_FIELD_NAME, - puConfig.embeddableSetOrdinalFieldName); - addConfig(propertyCollector, EnversSettings.ALLOW_IDENTIFIER_REUSE, puConfig.allowIdentifierReuse); + puConfig.embeddableSetOrdinalFieldName()); + 
addConfig(propertyCollector, EnversSettings.ALLOW_IDENTIFIER_REUSE, puConfig.allowIdentifierReuse()); addConfigIfPresent(propertyCollector, EnversSettings.MODIFIED_COLUMN_NAMING_STRATEGY, - puConfig.modifiedColumnNamingStrategy); + puConfig.modifiedColumnNamingStrategy()); } public static void addConfig(BiConsumer propertyCollector, String configPath, T value) { diff --git a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateConfigUtil.java b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateConfigUtil.java index 6e593f48a71de8..861a6f9d099ab5 100644 --- a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateConfigUtil.java +++ b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateConfigUtil.java @@ -17,17 +17,17 @@ public class HibernateConfigUtil { public static Map getCacheConfigEntries(HibernateOrmConfigPersistenceUnit config) { Map cacheRegionsConfigEntries = new HashMap<>(); - for (Map.Entry regionEntry : config.cache.entrySet()) { + for (Map.Entry regionEntry : config.cache().entrySet()) { String regionName = regionEntry.getKey(); HibernateOrmConfigPersistenceUnitCache cacheConfig = regionEntry.getValue(); - if (cacheConfig.expiration.maxIdle.isPresent()) { + if (cacheConfig.expiration().maxIdle().isPresent()) { cacheRegionsConfigEntries.put(getCacheConfigKey(regionName, EXPIRATION_MAX_IDLE), - String.valueOf(cacheConfig.expiration.maxIdle.get().getSeconds())); + String.valueOf(cacheConfig.expiration().maxIdle().get().getSeconds())); } - if (cacheConfig.memory.objectCount.isPresent()) { + if (cacheConfig.memory().objectCount().isPresent()) { cacheRegionsConfigEntries.put(getCacheConfigKey(regionName, MEMORY_OBJECT_COUNT), - String.valueOf(cacheConfig.memory.objectCount.getAsLong())); + String.valueOf(cacheConfig.memory().objectCount().getAsLong())); } } diff --git 
a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmCdiProcessor.java b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmCdiProcessor.java index fc207f65444c32..7a17d32e38408e 100644 --- a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmCdiProcessor.java +++ b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmCdiProcessor.java @@ -1,5 +1,6 @@ package io.quarkus.hibernate.orm.deployment; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; @@ -11,6 +12,8 @@ import jakarta.enterprise.inject.Default; import jakarta.enterprise.inject.Instance; import jakarta.inject.Singleton; +import jakarta.persistence.AttributeConverter; +import jakarta.transaction.TransactionManager; import org.hibernate.Session; import org.hibernate.SessionFactory; @@ -31,6 +34,7 @@ import io.quarkus.arc.deployment.BeanDefiningAnnotationBuildItem; import io.quarkus.arc.deployment.SyntheticBeanBuildItem; import io.quarkus.arc.deployment.SyntheticBeanBuildItem.ExtendedBeanConfigurator; +import io.quarkus.arc.deployment.UnremovableBeanBuildItem; import io.quarkus.arc.deployment.ValidationPhaseBuildItem; import io.quarkus.arc.processor.AnnotationsTransformer; import io.quarkus.arc.processor.DotNames; @@ -42,12 +46,16 @@ import io.quarkus.deployment.annotations.BuildSteps; import io.quarkus.deployment.annotations.ExecutionTime; import io.quarkus.deployment.annotations.Record; +import io.quarkus.deployment.builditem.CombinedIndexBuildItem; import io.quarkus.hibernate.orm.PersistenceUnit; import io.quarkus.hibernate.orm.runtime.HibernateOrmRecorder; import io.quarkus.hibernate.orm.runtime.HibernateOrmRuntimeConfig; import io.quarkus.hibernate.orm.runtime.JPAConfig; import io.quarkus.hibernate.orm.runtime.PersistenceUnitUtil; +import 
io.quarkus.hibernate.orm.runtime.RequestScopedSessionHolder; +import io.quarkus.hibernate.orm.runtime.RequestScopedStatelessSessionHolder; import io.quarkus.hibernate.orm.runtime.TransactionSessions; +import io.quarkus.hibernate.orm.runtime.cdi.QuarkusArcBeanContainer; @BuildSteps(onlyIf = HibernateOrmEnabled.class) public class HibernateOrmCdiProcessor { @@ -182,7 +190,8 @@ void generateDataSourceBeans(HibernateOrmRecorder recorder, .addInjectionPoint(ClassType.create(DotName.createSimple(JPAConfig.class))) .done()); - if (capabilities.isPresent(Capability.TRANSACTIONS)) { + if (capabilities.isPresent(Capability.TRANSACTIONS) + && capabilities.isMissing(Capability.HIBERNATE_REACTIVE)) { // Do register a Session/EntityManager bean only if JTA is available // Note that the Hibernate Reactive extension excludes JTA intentionally syntheticBeanBuildItemBuildProducer @@ -222,7 +231,8 @@ void generateDataSourceBeans(HibernateOrmRecorder recorder, .addInjectionPoint(ClassType.create(DotName.createSimple(JPAConfig.class))) .done()); - if (capabilities.isPresent(Capability.TRANSACTIONS)) { + if (capabilities.isPresent(Capability.TRANSACTIONS) + && capabilities.isMissing(Capability.HIBERNATE_REACTIVE)) { // Do register a Session/EntityManager bean only if JTA is available // Note that the Hibernate Reactive extension excludes JTA intentionally syntheticBeanBuildItemBuildProducer @@ -245,6 +255,40 @@ void generateDataSourceBeans(HibernateOrmRecorder recorder, } } + @BuildStep + void registerBeans(HibernateOrmConfig hibernateOrmConfig, + BuildProducer additionalBeans, + BuildProducer unremovableBeans, + Capabilities capabilities, + CombinedIndexBuildItem combinedIndex, + List descriptors, + JpaModelBuildItem jpaModel) { + if (!HibernateOrmProcessor.hasEntities(jpaModel)) { + return; + } + + List> unremovableClasses = new ArrayList<>(); + unremovableClasses.add(QuarkusArcBeanContainer.class); + + if (capabilities.isMissing(Capability.HIBERNATE_REACTIVE)) { + // The following 
beans only make sense for Hibernate ORM, not for Hibernate Reactive + + if (capabilities.isPresent(Capability.TRANSACTIONS)) { + unremovableClasses.add(TransactionManager.class); + unremovableClasses.add(TransactionSessions.class); + } + unremovableClasses.add(RequestScopedSessionHolder.class); + unremovableClasses.add(RequestScopedStatelessSessionHolder.class); + } + additionalBeans.produce(AdditionalBeanBuildItem.builder().setUnremovable() + .addBeanClasses(unremovableClasses.toArray(new Class[unremovableClasses.size()])) + .build()); + + // Some user-injectable beans are retrieved programmatically and shouldn't be removed + unremovableBeans.produce(UnremovableBeanBuildItem.beanTypes(AttributeConverter.class)); + unremovableBeans.produce(UnremovableBeanBuildItem.beanTypes(jpaModel.getPotentialCdiBeanClassNames())); + } + @BuildStep void registerAnnotations(BuildProducer additionalBeans, BuildProducer beanDefiningAnnotations) { diff --git a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmConfig.java b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmConfig.java index e2be11c863d715..32e60b297619c8 100644 --- a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmConfig.java +++ b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmConfig.java @@ -9,12 +9,18 @@ import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigRoot; -import io.quarkus.runtime.annotations.ConvertWith; - +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithConverter; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithDefaults; +import io.smallrye.config.WithName; +import 
io.smallrye.config.WithParentName; +import io.smallrye.config.WithUnnamedKey; + +@ConfigMapping(prefix = "quarkus.hibernate-orm") @ConfigRoot -public class HibernateOrmConfig { +public interface HibernateOrmConfig { /** * Whether Hibernate ORM is enabled *during the build*. @@ -25,49 +31,51 @@ public class HibernateOrmConfig { * * @asciidoclet */ - @ConfigItem(defaultValue = "true") - public boolean enabled; + @WithDefault("true") + boolean enabled(); /** * Database related configuration. */ - @ConfigItem @ConfigDocSection - public HibernateOrmConfigDatabase database; - - /** - * Configuration for the default persistence unit. - */ - @ConfigItem(name = ConfigItem.PARENT) - public HibernateOrmConfigPersistenceUnit defaultPersistenceUnit; + HibernateOrmConfigDatabase database(); /** - * Additional named persistence units. + * Configuration for persistence units. */ - @ConfigDocSection + @WithParentName + @WithUnnamedKey(PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME) + @WithDefaults @ConfigDocMapKey("persistence-unit-name") - @ConfigItem(name = ConfigItem.PARENT) - public Map persistenceUnits; + Map persistenceUnits(); + + default HibernateOrmConfigPersistenceUnit defaultPersistenceUnit() { + return persistenceUnits().get(PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME); + } + + default Map namedPersistenceUnits() { + Map map = new TreeMap<>(); + map.putAll(persistenceUnits()); + map.remove(PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME); + return map; + } /** * Configuration for the {@code persistence.xml} handling. */ - @ConfigItem - public HibernateOrmConfigPersistenceXml persistenceXml; + HibernateOrmConfigPersistenceXml persistenceXml(); /** * Logging configuration. */ - @ConfigItem @ConfigDocSection - public HibernateOrmConfigLog log; + HibernateOrmConfigLog log(); /** * Whether statistics collection is enabled. If 'metrics.enabled' is true, then the default here is * considered true, otherwise the default is false. 
*/ - @ConfigItem - public Optional statistics; + Optional statistics(); /** * Whether session metrics should be appended into the server log for each Hibernate session. This @@ -75,73 +83,62 @@ public class HibernateOrmConfig { * (which means both `statistics` and `log-session-metrics` need to be enabled for the session metrics * to appear in the log). */ - @ConfigItem - public Optional logSessionMetrics; + Optional logSessionMetrics(); /** * Configuration related to metrics. */ - @ConfigItem - public HibernateOrmConfigMetric metrics; + HibernateOrmConfigMetric metrics(); - public boolean isAnyNonPersistenceXmlPropertySet() { + default boolean isAnyNonPersistenceXmlPropertySet() { // Do NOT include persistenceXml in here. - return defaultPersistenceUnit.isAnyPropertySet() || - !persistenceUnits.isEmpty() || - log.isAnyPropertySet() || - statistics.isPresent() || - logSessionMetrics.isPresent() || - metrics.isAnyPropertySet(); - } - - public Map getAllPersistenceUnitConfigsAsMap() { - Map map = new TreeMap<>(); - if (defaultPersistenceUnit != null) { - map.put(PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME, defaultPersistenceUnit); - } - map.putAll(persistenceUnits); - return map; + return defaultPersistenceUnit().isAnyPropertySet() || + !namedPersistenceUnits().isEmpty() || + log().isAnyPropertySet() || + statistics().isPresent() || + logSessionMetrics().isPresent() || + metrics().isAnyPropertySet(); } @ConfigGroup - public static class HibernateOrmConfigPersistenceXml { + interface HibernateOrmConfigPersistenceXml { /** * If {@code true}, Quarkus will ignore any {@code persistence.xml} file in the classpath * and rely exclusively on the Quarkus configuration. */ - @ConfigItem - public boolean ignore; + @WithDefault("false") + boolean ignore(); } @ConfigGroup - public static class HibernateOrmConfigLog { + interface HibernateOrmConfigLog { /** * Logs SQL bind parameter. *

* Setting it to true is obviously not recommended in production. */ - @ConfigItem @Deprecated - public boolean bindParam; + @WithDefault("false") + boolean bindParam(); /** * Logs SQL bind parameters. *

* Setting it to true is obviously not recommended in production. */ - @ConfigItem - public boolean bindParameters; + @WithDefault("false") + boolean bindParameters(); - public boolean isAnyPropertySet() { - return bindParam || bindParameters; + default boolean isAnyPropertySet() { + return bindParam() || bindParameters(); } } @ConfigGroup - public static class HibernateOrmConfigDatabase { + interface HibernateOrmConfigDatabase { /** * When set, attempts to exchange data with the database * as the given version of Hibernate ORM would have, @@ -165,22 +162,23 @@ public static class HibernateOrmConfigDatabase { * * @asciidoclet */ - @ConfigItem(name = "orm-compatibility.version", defaultValue = "latest") - @ConvertWith(DatabaseOrmCompatibilityVersion.Converter.class) - public DatabaseOrmCompatibilityVersion ormCompatibilityVersion; + @WithName("orm-compatibility.version") + @WithDefault("latest") + @WithConverter(DatabaseOrmCompatibilityVersion.Converter.class) + DatabaseOrmCompatibilityVersion ormCompatibilityVersion(); } @ConfigGroup - public static class HibernateOrmConfigMetric { + interface HibernateOrmConfigMetric { /** * Whether metrics are published if a metrics extension is enabled. 
*/ - @ConfigItem - public boolean enabled; + @WithDefault("false") + boolean enabled(); - public boolean isAnyPropertySet() { - return enabled; + default boolean isAnyPropertySet() { + return enabled(); } } diff --git a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmConfigPersistenceUnit.java b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmConfigPersistenceUnit.java index f87622b5957a0c..bdf64462f2836f 100644 --- a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmConfigPersistenceUnit.java +++ b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmConfigPersistenceUnit.java @@ -2,7 +2,6 @@ import java.nio.charset.Charset; import java.time.Duration; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -13,38 +12,38 @@ import org.hibernate.annotations.TimeZoneStorageType; import org.hibernate.id.enhanced.StandardOptimizerDescriptor; +import io.quarkus.runtime.annotations.ConfigDocDefault; +import io.quarkus.runtime.annotations.ConfigDocIgnore; import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; -import io.quarkus.runtime.annotations.ConvertWith; import io.quarkus.runtime.configuration.TrimmedStringConverter; +import io.smallrye.config.WithConverter; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; +import io.smallrye.config.WithParentName; @ConfigGroup -public class HibernateOrmConfigPersistenceUnit { +public interface HibernateOrmConfigPersistenceUnit { /** * The name of the datasource which this persistence unit uses. *

* If undefined, it will use the default datasource. */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional datasource; + @WithConverter(TrimmedStringConverter.class) + Optional datasource(); /** * The packages in which the entities affected to this persistence unit are located. */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional> packages; + Optional> packages(); /** * Dialect related configuration. */ - @ConfigItem @ConfigDocSection - public HibernateOrmConfigPersistenceUnitDialect dialect; + HibernateOrmConfigPersistenceUnitDialect dialect(); // @formatter:off /** @@ -83,9 +82,8 @@ public class HibernateOrmConfigPersistenceUnit { * @asciidoclet */ // @formatter:on - @ConfigItem(defaultValueDocumentation = "import.sql in DEV, TEST ; no-file otherwise") - @ConvertWith(TrimmedStringConverter.class) - public Optional> sqlLoadScript; + @ConfigDocDefault("import.sql in DEV, TEST ; no-file otherwise") + Optional> sqlLoadScript(); /** * The size of the batches used when loading entities and collections. @@ -95,9 +93,9 @@ public class HibernateOrmConfigPersistenceUnit { * @deprecated {@link #fetch} should be used to configure fetching properties. * @asciidoclet */ - @ConfigItem(defaultValueDocumentation = "16") + @ConfigDocDefault("16") @Deprecated - public OptionalInt batchFetchSize; + OptionalInt batchFetchSize(); /** * The maximum depth of outer join fetch tree for single-ended associations (one-to-one, many-to-one). @@ -107,27 +105,24 @@ public class HibernateOrmConfigPersistenceUnit { * @deprecated {@link #fetch} should be used to configure fetching properties. * @asciidoclet */ - @ConfigItem @Deprecated - public OptionalInt maxFetchDepth; + OptionalInt maxFetchDepth(); /** * Pluggable strategy contract for applying physical naming rules for database object names. 
* * Class name of the Hibernate PhysicalNamingStrategy implementation */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional physicalNamingStrategy; + @WithConverter(TrimmedStringConverter.class) + Optional physicalNamingStrategy(); /** * Pluggable strategy for applying implicit naming rules when an explicit name is not given. * * Class name of the Hibernate ImplicitNamingStrategy implementation */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional implicitNamingStrategy; + @WithConverter(TrimmedStringConverter.class) + Optional implicitNamingStrategy(); /** * Class name of a custom @@ -145,9 +140,8 @@ public class HibernateOrmConfigPersistenceUnit { * * @asciidoclet */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional metadataBuilderContributor; + @WithConverter(TrimmedStringConverter.class) + Optional metadataBuilderContributor(); /** * XML files to configure the entity mapping, e.g. {@code META-INF/my-orm.xml}. @@ -155,64 +149,55 @@ public class HibernateOrmConfigPersistenceUnit { * Defaults to `META-INF/orm.xml` if it exists. * Pass `no-file` to force Hibernate ORM to ignore `META-INF/orm.xml`. */ - @ConfigItem(defaultValueDocumentation = "META-INF/orm.xml if it exists; no-file otherwise") - @ConvertWith(TrimmedStringConverter.class) - public Optional> mappingFiles; + @ConfigDocDefault("META-INF/orm.xml if it exists; no-file otherwise") + Optional> mappingFiles(); /** * Mapping configuration. */ - @ConfigItem @ConfigDocSection - public HibernateOrmConfigPersistenceUnitMapping mapping; + HibernateOrmConfigPersistenceUnitMapping mapping(); /** * Query related configuration. */ - @ConfigItem @ConfigDocSection - public HibernateOrmConfigPersistenceUnitQuery query; + HibernateOrmConfigPersistenceUnitQuery query(); /** * Database related configuration. 
*/ - @ConfigItem @ConfigDocSection - public HibernateOrmConfigPersistenceUnitDatabase database; + HibernateOrmConfigPersistenceUnitDatabase database(); /** * JDBC related configuration. */ - @ConfigItem @ConfigDocSection - public HibernateOrmConfigPersistenceUnitJdbc jdbc; + HibernateOrmConfigPersistenceUnitJdbc jdbc(); /** * Fetching logic configuration. */ - @ConfigItem @ConfigDocSection - public HibernateOrmConfigPersistenceUnitFetch fetch; + HibernateOrmConfigPersistenceUnitFetch fetch(); /** * Caching configuration */ - @ConfigItem @ConfigDocSection - public Map cache; + Map cache(); /** * Discriminator related configuration. */ - @ConfigItem @ConfigDocSection - public HibernateOrmConfigPersistenceUnitDiscriminator discriminator; + HibernateOrmConfigPersistenceUnitDiscriminator discriminator(); /** * Config related to identifier quoting. */ - @ConfigItem(defaultValue = "none") - public HibernateOrmConfigPersistenceUnitQuoteIdentifiers quoteIdentifiers; + HibernateOrmConfigPersistenceUnitQuoteIdentifiers quoteIdentifiers(); /** * The default in Quarkus is for 2nd level caching to be enabled, @@ -222,14 +207,13 @@ public class HibernateOrmConfigPersistenceUnit { *

* Set this to false to disable all 2nd level caches. */ - @ConfigItem(defaultValue = "true") - public boolean secondLevelCachingEnabled; + @WithDefault("true") + boolean secondLevelCachingEnabled(); /** * Bean Validation configuration. */ - @ConfigItem - public HibernateOrmConfigPersistenceValidation validation; + HibernateOrmConfigPersistenceValidation validation(); /** * Defines the method for multi-tenancy (DATABASE, NONE, SCHEMA). The complete list of allowed values is available in the @@ -239,56 +223,54 @@ public class HibernateOrmConfigPersistenceUnit { * * @asciidoclet */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional multitenant; + @WithConverter(TrimmedStringConverter.class) + Optional multitenant(); /** * Defines the name of the datasource to use in case of SCHEMA approach. The datasource of the persistence unit will be used * if not set. */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional multitenantSchemaDatasource; + @WithConverter(TrimmedStringConverter.class) + Optional multitenantSchemaDatasource(); /** * If hibernate is not auto generating the schema, and Quarkus is running in development mode * then Quarkus will attempt to validate the database after startup and print a log message if * there are any problems. 
*/ - @ConfigItem(defaultValue = "true") - public boolean validateInDevMode; + @WithDefault("true") + boolean validateInDevMode(); - @ConfigItem(generateDocumentation = false) + @ConfigDocIgnore @ConfigDocMapKey("full-property-key") - public Map unsupportedProperties = new HashMap<>(); - - public boolean isAnyPropertySet() { - return datasource.isPresent() || - packages.isPresent() || - dialect.isAnyPropertySet() || - sqlLoadScript.isPresent() || - batchFetchSize.isPresent() || - maxFetchDepth.isPresent() || - physicalNamingStrategy.isPresent() || - implicitNamingStrategy.isPresent() || - metadataBuilderContributor.isPresent() || - mapping.isAnyPropertySet() || - query.isAnyPropertySet() || - database.isAnyPropertySet() || - jdbc.isAnyPropertySet() || - !cache.isEmpty() || - !secondLevelCachingEnabled || - multitenant.isPresent() || - multitenantSchemaDatasource.isPresent() || - fetch.isAnyPropertySet() || - discriminator.isAnyPropertySet() || - quoteIdentifiers.isAnyPropertySet() || - !unsupportedProperties.isEmpty(); + Map unsupportedProperties(); + + default boolean isAnyPropertySet() { + return datasource().isPresent() || + packages().isPresent() || + dialect().isAnyPropertySet() || + sqlLoadScript().isPresent() || + batchFetchSize().isPresent() || + maxFetchDepth().isPresent() || + physicalNamingStrategy().isPresent() || + implicitNamingStrategy().isPresent() || + metadataBuilderContributor().isPresent() || + mapping().isAnyPropertySet() || + query().isAnyPropertySet() || + database().isAnyPropertySet() || + jdbc().isAnyPropertySet() || + !cache().isEmpty() || + !secondLevelCachingEnabled() || + multitenant().isPresent() || + multitenantSchemaDatasource().isPresent() || + fetch().isAnyPropertySet() || + discriminator().isAnyPropertySet() || + quoteIdentifiers().isAnyPropertySet() || + !unsupportedProperties().isEmpty(); } @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitDialect { + interface HibernateOrmConfigPersistenceUnitDialect { /** * 
Class name of the Hibernate ORM dialect. @@ -313,9 +295,10 @@ public static class HibernateOrmConfigPersistenceUnitDialect { * * @asciidoclet */ - @ConfigItem(name = ConfigItem.PARENT, defaultValueDocumentation = "selected automatically for most popular databases") - @ConvertWith(TrimmedStringConverter.class) - public Optional dialect; + @WithParentName + @ConfigDocDefault("selected automatically for most popular databases") + @WithConverter(TrimmedStringConverter.class) + Optional dialect(); /** * The storage engine to use when the dialect supports multiple storage engines. @@ -324,12 +307,11 @@ public static class HibernateOrmConfigPersistenceUnitDialect { * * @asciidoclet */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional storageEngine; + @WithConverter(TrimmedStringConverter.class) + Optional storageEngine(); - public boolean isAnyPropertySet() { - return dialect.isPresent() || storageEngine.isPresent(); + default boolean isAnyPropertySet() { + return dialect().isPresent() || storageEngine().isPresent(); } } @@ -337,21 +319,19 @@ public boolean isAnyPropertySet() { * Mapping-related configuration. */ @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitMapping { + interface HibernateOrmConfigPersistenceUnitMapping { /** * Timezone configuration. */ - @ConfigItem - public Timezone timezone; + Timezone timezone(); /** * Optimizer configuration. */ - @ConfigItem - public Id id; + Id id(); @ConfigGroup - public static class Timezone { + interface Timezone { /** * How to store timezones in the database by default * for properties of type `OffsetDateTime` and `ZonedDateTime`. 
@@ -391,20 +371,20 @@ public static class Timezone { * * @asciidoclet */ - @ConfigItem(name = "default-storage", defaultValueDocumentation = "default") - public Optional timeZoneDefaultStorage; + @WithName("default-storage") + @ConfigDocDefault("default") + Optional timeZoneDefaultStorage(); } @ConfigGroup - public static class Id { + interface Id { /** * Optimizer configuration. */ - @ConfigItem - public Optimizer optimizer; + Optimizer optimizer(); @ConfigGroup - public static class Optimizer { + interface Optimizer { /** * The optimizer to apply to identifier generators * whose optimizer is not configured explicitly. @@ -418,24 +398,25 @@ public static class Optimizer { * * @asciidoclet */ - @ConfigItem(name = "default", defaultValueDocumentation = "pooled-lo") + @WithName("default") + @ConfigDocDefault("pooled-lo") // Note this needs to be a build-time property due to // org.hibernate.boot.internal.InFlightMetadataCollectorImpl.handleIdentifierValueBinding // which may call (indirectly) org.hibernate.id.enhanced.SequenceStructure.buildSequence // whose output depends on org.hibernate.id.enhanced.SequenceStructure.applyIncrementSizeToSourceValues // which is determined by the optimizer. - public Optional idOptimizerDefault; + Optional idOptimizerDefault(); } } - public boolean isAnyPropertySet() { - return timezone.timeZoneDefaultStorage.isPresent() - || id.optimizer.idOptimizerDefault.isPresent(); + default boolean isAnyPropertySet() { + return timezone().timeZoneDefaultStorage().isPresent() + || id().optimizer().idOptimizerDefault().isPresent(); } } - public enum IdOptimizerType { + enum IdOptimizerType { /** * Assumes the value retrieved from the table/sequence is the lower end of the pool. 
* @@ -489,11 +470,11 @@ public enum IdOptimizerType { } @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitQuery { + interface HibernateOrmConfigPersistenceUnitQuery { - private static final int DEFAULT_QUERY_PLAN_CACHE_MAX_SIZE = 2048; + int DEFAULT_QUERY_PLAN_CACHE_MAX_SIZE = 2048; - public enum NullOrdering { + enum NullOrdering { NONE, FIRST, LAST @@ -503,8 +484,8 @@ public enum NullOrdering { * The maximum size of the query plan cache. * see #{@value org.hibernate.cfg.AvailableSettings#QUERY_PLAN_CACHE_MAX_SIZE} */ - @ConfigItem(defaultValue = "2048") - public int queryPlanCacheMaxSize; + @WithDefault("2048") + int queryPlanCacheMaxSize(); /** * Default precedence of null values in `ORDER BY` clauses. @@ -513,114 +494,107 @@ public enum NullOrdering { * * @asciidoclet */ - @ConfigItem(defaultValue = "none") - public NullOrdering defaultNullOrdering; + @WithDefault("none") + NullOrdering defaultNullOrdering(); /** * Enables IN clause parameter padding which improves statement caching. */ - @ConfigItem(defaultValue = "true") - public boolean inClauseParameterPadding; + @WithDefault("true") + boolean inClauseParameterPadding(); - public boolean isAnyPropertySet() { - return queryPlanCacheMaxSize != DEFAULT_QUERY_PLAN_CACHE_MAX_SIZE - || defaultNullOrdering != NullOrdering.NONE - || !inClauseParameterPadding; + default boolean isAnyPropertySet() { + return queryPlanCacheMaxSize() != DEFAULT_QUERY_PLAN_CACHE_MAX_SIZE + || defaultNullOrdering() != NullOrdering.NONE + || !inClauseParameterPadding(); } } @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitDatabase { + interface HibernateOrmConfigPersistenceUnitDatabase { - private static final String DEFAULT_CHARSET = "UTF-8"; + String DEFAULT_CHARSET = "UTF-8"; /** * The charset of the database. *

* Used for DDL generation and also for the SQL import scripts. */ - @ConfigItem(defaultValue = DEFAULT_CHARSET) - public Charset charset; + @WithDefault(DEFAULT_CHARSET) + Charset charset(); /** * Whether Hibernate should quote all identifiers. * * @deprecated {@link #quoteIdentifiers} should be used to configure quoting strategy. */ - @ConfigItem @Deprecated - public boolean globallyQuotedIdentifiers; + @WithDefault("false") + boolean globallyQuotedIdentifiers(); - public boolean isAnyPropertySet() { - return !DEFAULT_CHARSET.equals(charset.name()) - || globallyQuotedIdentifiers; + default boolean isAnyPropertySet() { + return !DEFAULT_CHARSET.equals(charset().name()) + || globallyQuotedIdentifiers(); } } @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitJdbc { + interface HibernateOrmConfigPersistenceUnitJdbc { /** * The time zone pushed to the JDBC driver. * * See `quarkus.hibernate-orm.mapping.timezone.default-storage`. */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional timezone; + @WithConverter(TrimmedStringConverter.class) + Optional timezone(); /** * How many rows are fetched at a time by the JDBC driver. */ - @ConfigItem - public OptionalInt statementFetchSize; + OptionalInt statementFetchSize(); /** * The number of updates (inserts, updates and deletes) that are sent by the JDBC driver at one time for execution. */ - @ConfigItem - public OptionalInt statementBatchSize; + OptionalInt statementBatchSize(); - public boolean isAnyPropertySet() { - return timezone.isPresent() || statementFetchSize.isPresent() || statementBatchSize.isPresent(); + default boolean isAnyPropertySet() { + return timezone().isPresent() || statementFetchSize().isPresent() || statementBatchSize().isPresent(); } } @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitCache { + interface HibernateOrmConfigPersistenceUnitCache { /** * The cache expiration configuration. 
*/ - @ConfigItem - public HibernateOrmConfigPersistenceUnitCacheExpiration expiration; + HibernateOrmConfigPersistenceUnitCacheExpiration expiration(); /** * The cache memory storage configuration. */ - @ConfigItem - public HibernateOrmConfigPersistenceUnitCacheMemory memory; + HibernateOrmConfigPersistenceUnitCacheMemory memory(); } @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitCacheExpiration { + interface HibernateOrmConfigPersistenceUnitCacheExpiration { /** * The maximum time before an object of the cache is considered expired. */ - @ConfigItem - public Optional maxIdle; + Optional maxIdle(); } @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitCacheMemory { + interface HibernateOrmConfigPersistenceUnitCacheMemory { /** * The maximum number of objects kept in memory in the cache. */ - @ConfigItem - public OptionalLong objectCount; + OptionalLong objectCount(); } @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitFetch { + interface HibernateOrmConfigPersistenceUnitFetch { /** * The size of the batches used when loading entities and collections. * @@ -628,8 +602,8 @@ public static class HibernateOrmConfigPersistenceUnitFetch { * * @asciidoclet */ - @ConfigItem(defaultValueDocumentation = "16") - public OptionalInt batchSize; + @ConfigDocDefault("16") + OptionalInt batchSize(); /** * The maximum depth of outer join fetch tree for single-ended associations (one-to-one, many-to-one). 
@@ -638,17 +612,16 @@ public static class HibernateOrmConfigPersistenceUnitFetch { * * @asciidoclet */ - @ConfigItem - public OptionalInt maxDepth; + OptionalInt maxDepth(); - public boolean isAnyPropertySet() { - return batchSize.isPresent() || maxDepth.isPresent(); + default boolean isAnyPropertySet() { + return batchSize().isPresent() || maxDepth().isPresent(); } } @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitQuoteIdentifiers { + interface HibernateOrmConfigPersistenceUnitQuoteIdentifiers { /** * Identifiers can be quoted using one of the available strategies. @@ -660,11 +633,11 @@ public static class HibernateOrmConfigPersistenceUnitQuoteIdentifiers { * only * identifiers deemed SQL keywords by the Hibernate ORM dialect. */ - @ConfigItem(defaultValue = "none") - public IdentifierQuotingStrategy strategy; + @WithDefault("none") + IdentifierQuotingStrategy strategy(); - public boolean isAnyPropertySet() { - return strategy != IdentifierQuotingStrategy.NONE; + default boolean isAnyPropertySet() { + return strategy() != IdentifierQuotingStrategy.NONE; } } @@ -674,21 +647,21 @@ public boolean isAnyPropertySet() { * Separated in a group configuration, in case it is necessary to add the another existing hibernate discriminator property. */ @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitDiscriminator { + interface HibernateOrmConfigPersistenceUnitDiscriminator { /** * Existing applications rely (implicitly or explicitly) on Hibernate ignoring any DiscriminatorColumn declarations on * joined inheritance hierarchies. This setting allows these applications to maintain the legacy behavior of * DiscriminatorColumn annotations being ignored when paired with joined inheritance. 
*/ - @ConfigItem - public boolean ignoreExplicitForJoined; + @WithDefault("false") + boolean ignoreExplicitForJoined(); - public boolean isAnyPropertySet() { - return ignoreExplicitForJoined; + default boolean isAnyPropertySet() { + return ignoreExplicitForJoined(); } } - public enum IdentifierQuotingStrategy { + enum IdentifierQuotingStrategy { NONE, ALL, ALL_EXCEPT_COLUMN_DEFINITIONS, @@ -696,13 +669,13 @@ public enum IdentifierQuotingStrategy { } @ConfigGroup - public static class HibernateOrmConfigPersistenceValidation { + interface HibernateOrmConfigPersistenceValidation { /** * Enables the Bean Validation integration. */ - @ConfigItem(defaultValue = "true") - public boolean enabled; + @WithDefault("true") + boolean enabled(); } } diff --git a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmEnabled.java b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmEnabled.java index b5ed9ddb3183cb..7e9d0c63c43046 100644 --- a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmEnabled.java +++ b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmEnabled.java @@ -16,7 +16,7 @@ public class HibernateOrmEnabled implements BooleanSupplier { @Override public boolean getAsBoolean() { - return config.enabled; + return config.enabled(); } } diff --git a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java index 2ad2bb9e78ac7d..c93d00b8ce5a79 100644 --- a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java +++ b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java @@ -36,11 +36,9 @@ import 
jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.inject.Default; -import jakarta.persistence.AttributeConverter; import jakarta.persistence.SharedCacheMode; import jakarta.persistence.ValidationMode; import jakarta.persistence.spi.PersistenceUnitTransactionType; -import jakarta.transaction.TransactionManager; import jakarta.xml.bind.JAXBElement; import org.hibernate.boot.archive.scan.spi.ClassDescriptor; @@ -66,7 +64,6 @@ import io.quarkus.agroal.spi.JdbcDataSourceBuildItem; import io.quarkus.agroal.spi.JdbcDataSourceSchemaReadyBuildItem; -import io.quarkus.arc.deployment.AdditionalBeanBuildItem; import io.quarkus.arc.deployment.BeanContainerBuildItem; import io.quarkus.arc.deployment.BeanContainerListenerBuildItem; import io.quarkus.arc.deployment.RecorderBeanInitializedBuildItem; @@ -120,15 +117,11 @@ import io.quarkus.hibernate.orm.runtime.HibernateOrmRecorder; import io.quarkus.hibernate.orm.runtime.HibernateOrmRuntimeConfig; import io.quarkus.hibernate.orm.runtime.PersistenceUnitUtil; -import io.quarkus.hibernate.orm.runtime.RequestScopedSessionHolder; -import io.quarkus.hibernate.orm.runtime.RequestScopedStatelessSessionHolder; -import io.quarkus.hibernate.orm.runtime.TransactionSessions; import io.quarkus.hibernate.orm.runtime.boot.QuarkusPersistenceUnitDefinition; import io.quarkus.hibernate.orm.runtime.boot.scan.QuarkusScanner; import io.quarkus.hibernate.orm.runtime.boot.xml.JAXBElementSubstitution; import io.quarkus.hibernate.orm.runtime.boot.xml.QNameSubstitution; import io.quarkus.hibernate.orm.runtime.boot.xml.RecordableXmlMapping; -import io.quarkus.hibernate.orm.runtime.cdi.QuarkusArcBeanContainer; import io.quarkus.hibernate.orm.runtime.config.DialectVersions; import io.quarkus.hibernate.orm.runtime.dev.HibernateOrmDevIntegrator; import io.quarkus.hibernate.orm.runtime.integration.HibernateOrmIntegrationStaticDescriptor; @@ -202,10 +195,10 @@ void checkTransactionsSupport(Capabilities capabilities, BuildProducer 
additionalApplicationArchiveMarkers) { - for (HibernateOrmConfigPersistenceUnit persistenceUnit : hibernateOrmConfig.getAllPersistenceUnitConfigsAsMap() + for (HibernateOrmConfigPersistenceUnit persistenceUnit : hibernateOrmConfig.persistenceUnits() .values()) { - if (persistenceUnit.packages.isPresent()) { - for (String pakkage : persistenceUnit.packages.get()) { + if (persistenceUnit.packages().isPresent()) { + for (String pakkage : persistenceUnit.packages().get()) { additionalApplicationArchiveMarkers .produce(new AdditionalApplicationArchiveMarkerBuildItem(pakkage.replace('.', '/'))); } @@ -217,8 +210,8 @@ void includeArchivesHostingEntityPackagesInIndex(HibernateOrmConfig hibernateOrm @Consume(ServiceStartBuildItem.class) @BuildStep(onlyIf = IsDevelopment.class) void warnOfSchemaProblems(HibernateOrmConfig config, HibernateOrmRecorder recorder) { - for (var e : config.getAllPersistenceUnitConfigsAsMap().entrySet()) { - if (e.getValue().validateInDevMode) { + for (var e : config.persistenceUnits().entrySet()) { + if (e.getValue().validateInDevMode()) { recorder.doValidation(e.getKey()); } } @@ -323,7 +316,7 @@ public void configurationDescriptorBuilding( getMultiTenancyStrategy( Optional.ofNullable(persistenceXmlDescriptorBuildItem.getDescriptor() .getProperties().getProperty("hibernate.multiTenancy"))), //FIXME this property is meaningless in Hibernate ORM 6 - hibernateOrmConfig.database.ormCompatibilityVersion, Collections.emptyMap()), + hibernateOrmConfig.database().ormCompatibilityVersion(), Collections.emptyMap()), null, jpaModel.getXmlMappings(persistenceXmlDescriptorBuildItem.getDescriptor().getName()), false, true, capabilities)); @@ -377,13 +370,13 @@ public void contributePersistenceXmlToJpaModel( public void contributeQuarkusConfigToJpaModel( BuildProducer jpaModelPuContributions, HibernateOrmConfig hibernateOrmConfig) { - for (Entry entry : hibernateOrmConfig.getAllPersistenceUnitConfigsAsMap() + for (Entry entry : 
hibernateOrmConfig.persistenceUnits() .entrySet()) { String name = entry.getKey(); HibernateOrmConfigPersistenceUnit config = entry.getValue(); jpaModelPuContributions.produce(new JpaModelPersistenceUnitContributionBuildItem( name, null, Collections.emptySet(), - config.mappingFiles.orElse(Collections.emptySet()))); + config.mappingFiles().orElse(Collections.emptySet()))); } } @@ -592,36 +585,6 @@ void handleNativeImageImportSql(BuildProducer reso } } - @BuildStep - void registerBeans(HibernateOrmConfig hibernateOrmConfig, - BuildProducer additionalBeans, - BuildProducer unremovableBeans, - Capabilities capabilities, - CombinedIndexBuildItem combinedIndex, - List descriptors, - JpaModelBuildItem jpaModel) { - if (!hasEntities(jpaModel)) { - return; - } - - List> unremovableClasses = new ArrayList<>(); - if (capabilities.isPresent(Capability.TRANSACTIONS)) { - unremovableClasses.add(TransactionManager.class); - unremovableClasses.add(TransactionSessions.class); - } - unremovableClasses.add(RequestScopedSessionHolder.class); - unremovableClasses.add(RequestScopedStatelessSessionHolder.class); - unremovableClasses.add(QuarkusArcBeanContainer.class); - - additionalBeans.produce(AdditionalBeanBuildItem.builder().setUnremovable() - .addBeanClasses(unremovableClasses.toArray(new Class[unremovableClasses.size()])) - .build()); - - // Some user-injectable beans are retrieved programmatically and shouldn't be removed - unremovableBeans.produce(UnremovableBeanBuildItem.beanTypes(AttributeConverter.class)); - unremovableBeans.produce(UnremovableBeanBuildItem.beanTypes(jpaModel.getPotentialCdiBeanClassNames())); - } - @Consume(InterceptedStaticMethodsTransformersRegisteredBuildItem.class) @BuildStep @SuppressWarnings("deprecation") @@ -743,7 +706,7 @@ public void multitenancy(HibernateOrmRecorder recorder, @BuildStep public void produceLoggingCategories(HibernateOrmConfig hibernateOrmConfig, BuildProducer categories) { - if (hibernateOrmConfig.log.bindParam || 
hibernateOrmConfig.log.bindParameters) { + if (hibernateOrmConfig.log().bindParam() || hibernateOrmConfig.log().bindParameters()) { categories.produce(new LogCategoryBuildItem("org.hibernate.orm.jdbc.bind", Level.TRACE, true)); } } @@ -801,7 +764,7 @@ private static List getSqlLoadScript(Optional> sqlLoadScrip } } - private boolean hasEntities(JpaModelBuildItem jpaModel) { + static boolean hasEntities(JpaModelBuildItem jpaModel) { return !jpaModel.getEntityClassNames().isEmpty(); } @@ -820,7 +783,7 @@ private void handleHibernateORMWithNoPersistenceXml( BuildProducer persistenceUnitDescriptors, List dbKindMetadataBuildItems) { if (!descriptors.isEmpty()) { - if (hibernateOrmConfig.isAnyNonPersistenceXmlPropertySet() || !hibernateOrmConfig.persistenceUnits.isEmpty()) { + if (hibernateOrmConfig.isAnyNonPersistenceXmlPropertySet()) { throw new ConfigurationException( "A legacy persistence.xml file is present in the classpath, but Hibernate ORM is also configured through the Quarkus config file.\n" + "Legacy persistence.xml files and Quarkus configuration cannot be used at the same time.\n" @@ -848,8 +811,8 @@ private void handleHibernateORMWithNoPersistenceXml( .filter(i -> i.isDefault()) .findFirst(); boolean enableDefaultPersistenceUnit = (defaultJdbcDataSource.isPresent() - && hibernateOrmConfig.persistenceUnits.isEmpty()) - || hibernateOrmConfig.defaultPersistenceUnit.isAnyPropertySet(); + && hibernateOrmConfig.namedPersistenceUnits().isEmpty()) + || hibernateOrmConfig.defaultPersistenceUnit().isAnyPropertySet(); Map> modelClassesAndPackagesPerPersistencesUnits = getModelClassesAndPackagesPerPersistenceUnits( hibernateOrmConfig, jpaModel, index.getIndex(), enableDefaultPersistenceUnit); @@ -861,15 +824,15 @@ private void handleHibernateORMWithNoPersistenceXml( if (enableDefaultPersistenceUnit) { producePersistenceUnitDescriptorFromConfig( hibernateOrmConfig, PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME, - hibernateOrmConfig.defaultPersistenceUnit, + 
hibernateOrmConfig.defaultPersistenceUnit(), modelClassesAndPackagesForDefaultPersistenceUnit, jpaModel.getXmlMappings(PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME), jdbcDataSources, applicationArchivesBuildItem, launchMode, capabilities, systemProperties, nativeImageResources, hotDeploymentWatchedFiles, persistenceUnitDescriptors, storageEngineCollector, dbKindMetadataBuildItems); } else if (!modelClassesAndPackagesForDefaultPersistenceUnit.isEmpty() - && (!hibernateOrmConfig.defaultPersistenceUnit.datasource.isPresent() - || DataSourceUtil.isDefault(hibernateOrmConfig.defaultPersistenceUnit.datasource.get())) + && (!hibernateOrmConfig.defaultPersistenceUnit().datasource().isPresent() + || DataSourceUtil.isDefault(hibernateOrmConfig.defaultPersistenceUnit().datasource().get())) && !defaultJdbcDataSource.isPresent()) { String persistenceUnitName = PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME; String dataSourceName = DataSourceUtil.DEFAULT_DATASOURCE_NAME; @@ -877,7 +840,7 @@ private void handleHibernateORMWithNoPersistenceXml( DataSourceUtil.dataSourceNotConfigured(dataSourceName)); } - for (Entry persistenceUnitEntry : hibernateOrmConfig.persistenceUnits + for (Entry persistenceUnitEntry : hibernateOrmConfig.namedPersistenceUnits() .entrySet()) { producePersistenceUnitDescriptorFromConfig( hibernateOrmConfig, persistenceUnitEntry.getKey(), persistenceUnitEntry.getValue(), @@ -936,54 +899,55 @@ private static void producePersistenceUnitDescriptorFromConfig( descriptor.setTransactionType(PersistenceUnitTransactionType.JTA); - MultiTenancyStrategy multiTenancyStrategy = getMultiTenancyStrategy(persistenceUnitConfig.multitenant); + MultiTenancyStrategy multiTenancyStrategy = getMultiTenancyStrategy(persistenceUnitConfig.multitenant()); collectDialectConfig(persistenceUnitName, persistenceUnitConfig, dbKindMetadataBuildItems, jdbcDataSource, multiTenancyStrategy, systemProperties, descriptor.getProperties()::setProperty, storageEngineCollector); // Physical 
Naming Strategy - persistenceUnitConfig.physicalNamingStrategy.ifPresent( + persistenceUnitConfig.physicalNamingStrategy().ifPresent( namingStrategy -> descriptor.getProperties() .setProperty(AvailableSettings.PHYSICAL_NAMING_STRATEGY, namingStrategy)); // Implicit Naming Strategy - persistenceUnitConfig.implicitNamingStrategy.ifPresent( + persistenceUnitConfig.implicitNamingStrategy().ifPresent( namingStrategy -> descriptor.getProperties() .setProperty(AvailableSettings.IMPLICIT_NAMING_STRATEGY, namingStrategy)); // Metadata builder contributor - persistenceUnitConfig.metadataBuilderContributor.ifPresent( + persistenceUnitConfig.metadataBuilderContributor().ifPresent( className -> descriptor.getProperties() .setProperty(EntityManagerFactoryBuilderImpl.METADATA_BUILDER_CONTRIBUTOR, className)); // Mapping - if (persistenceUnitConfig.mapping.timezone.timeZoneDefaultStorage.isPresent()) { + if (persistenceUnitConfig.mapping().timezone().timeZoneDefaultStorage().isPresent()) { descriptor.getProperties().setProperty(AvailableSettings.TIMEZONE_DEFAULT_STORAGE, - persistenceUnitConfig.mapping.timezone.timeZoneDefaultStorage.get().name()); + persistenceUnitConfig.mapping().timezone().timeZoneDefaultStorage().get().name()); } descriptor.getProperties().setProperty(AvailableSettings.PREFERRED_POOLED_OPTIMIZER, - persistenceUnitConfig.mapping.id.optimizer.idOptimizerDefault + persistenceUnitConfig.mapping().id().optimizer().idOptimizerDefault() .orElse(HibernateOrmConfigPersistenceUnit.IdOptimizerType.POOLED_LO).configName); //charset descriptor.getProperties().setProperty(AvailableSettings.HBM2DDL_CHARSET_NAME, - persistenceUnitConfig.database.charset.name()); + persistenceUnitConfig.database().charset().name()); // Quoting strategy - if (persistenceUnitConfig.quoteIdentifiers.strategy == IdentifierQuotingStrategy.ALL - || persistenceUnitConfig.quoteIdentifiers.strategy == IdentifierQuotingStrategy.ALL_EXCEPT_COLUMN_DEFINITIONS - || 
persistenceUnitConfig.database.globallyQuotedIdentifiers) { + if (persistenceUnitConfig.quoteIdentifiers().strategy() == IdentifierQuotingStrategy.ALL + || persistenceUnitConfig.quoteIdentifiers() + .strategy() == IdentifierQuotingStrategy.ALL_EXCEPT_COLUMN_DEFINITIONS + || persistenceUnitConfig.database().globallyQuotedIdentifiers()) { descriptor.getProperties().setProperty(AvailableSettings.GLOBALLY_QUOTED_IDENTIFIERS, "true"); } - if (persistenceUnitConfig.quoteIdentifiers.strategy == IdentifierQuotingStrategy.ALL_EXCEPT_COLUMN_DEFINITIONS) { + if (persistenceUnitConfig.quoteIdentifiers().strategy() == IdentifierQuotingStrategy.ALL_EXCEPT_COLUMN_DEFINITIONS) { descriptor.getProperties().setProperty( AvailableSettings.GLOBALLY_QUOTED_IDENTIFIERS_SKIP_COLUMN_DEFINITIONS, "true"); - } else if (persistenceUnitConfig.quoteIdentifiers.strategy == IdentifierQuotingStrategy.ONLY_KEYWORDS) { + } else if (persistenceUnitConfig.quoteIdentifiers().strategy() == IdentifierQuotingStrategy.ONLY_KEYWORDS) { descriptor.getProperties().setProperty(AvailableSettings.KEYWORD_AUTO_QUOTING_ENABLED, "true"); } // Query - int batchSize = firstPresent(persistenceUnitConfig.fetch.batchSize, persistenceUnitConfig.batchFetchSize) + int batchSize = firstPresent(persistenceUnitConfig.fetch().batchSize(), persistenceUnitConfig.batchFetchSize()) .orElse(16); if (batchSize > 0) { descriptor.getProperties().setProperty(AvailableSettings.DEFAULT_BATCH_FETCH_SIZE, @@ -991,49 +955,49 @@ private static void producePersistenceUnitDescriptorFromConfig( descriptor.getProperties().setProperty(AvailableSettings.BATCH_FETCH_STYLE, BatchFetchStyle.PADDED.toString()); } - if (persistenceUnitConfig.fetch.maxDepth.isPresent()) { - setMaxFetchDepth(descriptor, persistenceUnitConfig.fetch.maxDepth); - } else if (persistenceUnitConfig.maxFetchDepth.isPresent()) { - setMaxFetchDepth(descriptor, persistenceUnitConfig.maxFetchDepth); + if (persistenceUnitConfig.fetch().maxDepth().isPresent()) { + 
setMaxFetchDepth(descriptor, persistenceUnitConfig.fetch().maxDepth()); + } else if (persistenceUnitConfig.maxFetchDepth().isPresent()) { + setMaxFetchDepth(descriptor, persistenceUnitConfig.maxFetchDepth()); } descriptor.getProperties().setProperty(AvailableSettings.QUERY_PLAN_CACHE_MAX_SIZE, Integer.toString( - persistenceUnitConfig.query.queryPlanCacheMaxSize)); + persistenceUnitConfig.query().queryPlanCacheMaxSize())); descriptor.getProperties().setProperty(AvailableSettings.DEFAULT_NULL_ORDERING, - persistenceUnitConfig.query.defaultNullOrdering.name().toLowerCase(Locale.ROOT)); + persistenceUnitConfig.query().defaultNullOrdering().name().toLowerCase(Locale.ROOT)); descriptor.getProperties().setProperty(AvailableSettings.IN_CLAUSE_PARAMETER_PADDING, - String.valueOf(persistenceUnitConfig.query.inClauseParameterPadding)); + String.valueOf(persistenceUnitConfig.query().inClauseParameterPadding())); // Disable sequence validations: they are reportedly slow, and people already get the same validation from normal schema validation descriptor.getProperties().put(AvailableSettings.SEQUENCE_INCREMENT_SIZE_MISMATCH_STRATEGY, SequenceMismatchStrategy.NONE); // JDBC - persistenceUnitConfig.jdbc.timezone.ifPresent( + persistenceUnitConfig.jdbc().timezone().ifPresent( timezone -> descriptor.getProperties().setProperty(AvailableSettings.JDBC_TIME_ZONE, timezone)); - persistenceUnitConfig.jdbc.statementFetchSize.ifPresent( + persistenceUnitConfig.jdbc().statementFetchSize().ifPresent( fetchSize -> descriptor.getProperties().setProperty(AvailableSettings.STATEMENT_FETCH_SIZE, String.valueOf(fetchSize))); - persistenceUnitConfig.jdbc.statementBatchSize.ifPresent( + persistenceUnitConfig.jdbc().statementBatchSize().ifPresent( fetchSize -> descriptor.getProperties().setProperty(AvailableSettings.STATEMENT_BATCH_SIZE, String.valueOf(fetchSize))); // Statistics - if (hibernateOrmConfig.metrics.enabled - || (hibernateOrmConfig.statistics.isPresent() && 
hibernateOrmConfig.statistics.get())) { + if (hibernateOrmConfig.metrics().enabled() + || (hibernateOrmConfig.statistics().isPresent() && hibernateOrmConfig.statistics().get())) { descriptor.getProperties().setProperty(AvailableSettings.GENERATE_STATISTICS, "true"); //When statistics are enabled, the default in Hibernate ORM is to also log them after each // session; turn that off by default as it's very noisy: descriptor.getProperties().setProperty(AvailableSettings.LOG_SESSION_METRICS, - String.valueOf(hibernateOrmConfig.logSessionMetrics.orElse(false))); + String.valueOf(hibernateOrmConfig.logSessionMetrics().orElse(false))); } // sql-load-scripts - List importFiles = getSqlLoadScript(persistenceUnitConfig.sqlLoadScript, launchMode); + List importFiles = getSqlLoadScript(persistenceUnitConfig.sqlLoadScript(), launchMode); if (!importFiles.isEmpty()) { for (String importFile : importFiles) { @@ -1044,19 +1008,19 @@ private static void producePersistenceUnitDescriptorFromConfig( throw new ConfigurationException( "Unable to interpret path referenced in '" + HibernateOrmRuntimeConfig.puPropertyKey(persistenceUnitName, "sql-load-script") + "=" - + String.join(",", persistenceUnitConfig.sqlLoadScript.get()) + + String.join(",", persistenceUnitConfig.sqlLoadScript().get()) + "': " + e.getMessage()); } if (loadScriptPath != null && !Files.isDirectory(loadScriptPath)) { // enlist resource if present nativeImageResources.produce(new NativeImageResourceBuildItem(importFile)); - } else if (persistenceUnitConfig.sqlLoadScript.isPresent()) { + } else if (persistenceUnitConfig.sqlLoadScript().isPresent()) { //raise exception if explicit file is not present (i.e. not the default) throw new ConfigurationException( "Unable to find file referenced in '" + HibernateOrmRuntimeConfig.puPropertyKey(persistenceUnitName, "sql-load-script") + "=" - + String.join(",", persistenceUnitConfig.sqlLoadScript.get()) + + String.join(",", persistenceUnitConfig.sqlLoadScript().get()) + "'. 
Remove property or add file to your path."); } // in dev mode we want to make sure that we watch for changes to file even if it doesn't currently exist @@ -1065,7 +1029,7 @@ private static void producePersistenceUnitDescriptorFromConfig( } // only set the found import files if configured - if (persistenceUnitConfig.sqlLoadScript.isPresent()) { + if (persistenceUnitConfig.sqlLoadScript().isPresent()) { descriptor.getProperties().setProperty(AvailableSettings.HBM2DDL_IMPORT_FILES, String.join(",", importFiles)); } } else { @@ -1074,7 +1038,7 @@ private static void producePersistenceUnitDescriptorFromConfig( } // Caching - if (persistenceUnitConfig.secondLevelCachingEnabled) { + if (persistenceUnitConfig.secondLevelCachingEnabled()) { Properties p = descriptor.getProperties(); //Only set these if the user isn't making an explicit choice: p.putIfAbsent(USE_DIRECT_REFERENCE_CACHE_ENTRIES, Boolean.TRUE); @@ -1097,7 +1061,7 @@ private static void producePersistenceUnitDescriptorFromConfig( // Hibernate Validator integration: we force the callback mode to have bootstrap errors reported rather than validation ignored // if there is any issue when bootstrapping Hibernate Validator. 
if (capabilities.isPresent(Capability.HIBERNATE_VALIDATOR)) { - if (persistenceUnitConfig.validation.enabled) { + if (persistenceUnitConfig.validation().enabled()) { descriptor.getProperties().setProperty(AvailableSettings.JAKARTA_VALIDATION_MODE, ValidationMode.CALLBACK.name()); } else { @@ -1107,7 +1071,7 @@ private static void producePersistenceUnitDescriptorFromConfig( // Discriminator Column descriptor.getProperties().setProperty(AvailableSettings.IGNORE_EXPLICIT_DISCRIMINATOR_COLUMNS_FOR_JOINED_SUBCLASS, - String.valueOf(persistenceUnitConfig.discriminator.ignoreExplicitForJoined)); + String.valueOf(persistenceUnitConfig.discriminator().ignoreExplicitForJoined())); persistenceUnitDescriptors.produce( new PersistenceUnitDescriptorBuildItem(descriptor, @@ -1117,9 +1081,9 @@ private static void producePersistenceUnitDescriptorFromConfig( jdbcDataSource.map(JdbcDataSourceBuildItem::getDbKind), jdbcDataSource.flatMap(JdbcDataSourceBuildItem::getDbVersion), multiTenancyStrategy, - hibernateOrmConfig.database.ormCompatibilityVersion, - persistenceUnitConfig.unsupportedProperties), - persistenceUnitConfig.multitenantSchemaDatasource.orElse(null), + hibernateOrmConfig.database().ormCompatibilityVersion(), + persistenceUnitConfig.unsupportedProperties()), + persistenceUnitConfig.multitenantSchemaDatasource().orElse(null), xmlMappings, false, false, capabilities)); } @@ -1130,7 +1094,7 @@ private static void collectDialectConfig(String persistenceUnitName, MultiTenancyStrategy multiTenancyStrategy, BuildProducer systemProperties, BiConsumer puPropertiesCollector, Set storageEngineCollector) { - Optional explicitDialect = persistenceUnitConfig.dialect.dialect; + Optional explicitDialect = persistenceUnitConfig.dialect().dialect(); Optional dbKind = jdbcDataSource.map(JdbcDataSourceBuildItem::getDbKind); Optional explicitDbMinVersion = jdbcDataSource.flatMap(JdbcDataSourceBuildItem::getDbVersion); if (multiTenancyStrategy != MultiTenancyStrategy.DATABASE && 
jdbcDataSource.isEmpty()) { @@ -1182,14 +1146,14 @@ private static void collectDialectConfig(String persistenceUnitName, persistenceUnitName)); } - if (persistenceUnitConfig.dialect.storageEngine.isPresent()) { + if (persistenceUnitConfig.dialect().storageEngine().isPresent()) { // Only actually set the storage engines if MySQL or MariaDB if (isMySQLOrMariaDB(dialect.get())) { // The storage engine has to be set as a system property. // We record it so that we can later run checks (because we can only set a single value) - storageEngineCollector.add(persistenceUnitConfig.dialect.storageEngine.get()); + storageEngineCollector.add(persistenceUnitConfig.dialect().storageEngine().get()); systemProperties.produce(new SystemPropertyBuildItem(AvailableSettings.STORAGE_ENGINE, - persistenceUnitConfig.dialect.storageEngine.get())); + persistenceUnitConfig.dialect().storageEngine().get())); } else { LOG.warnf("The storage engine set through configuration property '%1$s' is being ignored" + " because the database is neither MySQL nor MariaDB.", @@ -1224,8 +1188,8 @@ private static void collectDialectConfigForPersistenceXml(String persistenceUnit private static Optional findJdbcDataSource(String persistenceUnitName, HibernateOrmConfigPersistenceUnit persistenceUnitConfig, List jdbcDataSources) { - if (persistenceUnitConfig.datasource.isPresent()) { - String dataSourceName = persistenceUnitConfig.datasource.get(); + if (persistenceUnitConfig.datasource().isPresent()) { + String dataSourceName = persistenceUnitConfig.datasource().get(); return Optional.of(jdbcDataSources.stream() .filter(i -> dataSourceName.equals(i.getName())) .findFirst() @@ -1293,22 +1257,23 @@ public static Map> getModelClassesAndPackagesPerPersistenceU // handle the default persistence unit if (enableDefaultPersistenceUnit) { - if (!hibernateOrmConfig.defaultPersistenceUnit.packages.isPresent()) { + if (!hibernateOrmConfig.defaultPersistenceUnit().packages().isPresent()) { throw new 
ConfigurationException("Packages must be configured for the default persistence unit."); } - for (String packageName : hibernateOrmConfig.defaultPersistenceUnit.packages.get()) { + for (String packageName : hibernateOrmConfig.defaultPersistenceUnit().packages().get()) { packageRules.computeIfAbsent(normalizePackage(packageName), p -> new HashSet<>()) .add(PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME); } } // handle the named persistence units - for (Entry candidatePersistenceUnitEntry : hibernateOrmConfig.persistenceUnits + for (Entry candidatePersistenceUnitEntry : hibernateOrmConfig + .namedPersistenceUnits() .entrySet()) { String candidatePersistenceUnitName = candidatePersistenceUnitEntry.getKey(); - Set candidatePersistenceUnitPackages = candidatePersistenceUnitEntry.getValue().packages + Set candidatePersistenceUnitPackages = candidatePersistenceUnitEntry.getValue().packages() .orElseThrow(() -> new ConfigurationException(String.format(Locale.ROOT, "Packages must be configured for persistence unit '%s'.", candidatePersistenceUnitName))); @@ -1333,7 +1298,7 @@ public static Map> getModelClassesAndPackagesPerPersistenceU .add(persistenceUnitName); } } - } else if (!hibernateOrmConfig.persistenceUnits.isEmpty()) { + } else if (!hibernateOrmConfig.namedPersistenceUnits().isEmpty()) { throw new ConfigurationException( "Multiple persistence units are defined but the entities are not mapped to them. 
You should either use the .packages Quarkus configuration property or package-level @PersistenceUnit annotations."); } else { @@ -1439,9 +1404,9 @@ private static String normalizePackage(String pakkage) { } private static boolean hasPackagesInQuarkusConfig(HibernateOrmConfig hibernateOrmConfig) { - for (HibernateOrmConfigPersistenceUnit persistenceUnitConfig : hibernateOrmConfig.getAllPersistenceUnitConfigsAsMap() + for (HibernateOrmConfigPersistenceUnit persistenceUnitConfig : hibernateOrmConfig.persistenceUnits() .values()) { - if (persistenceUnitConfig.packages.isPresent()) { + if (persistenceUnitConfig.packages().isPresent()) { return true; } } @@ -1480,7 +1445,7 @@ private static Collection getPackageLevelPersistenceUnitAnno * @return true if we're expected to ignore them */ private boolean shouldIgnorePersistenceXmlResources(HibernateOrmConfig config) { - return config.persistenceXml.ignore || Boolean.getBoolean("SKIP_PARSE_PERSISTENCE_XML"); + return config.persistenceXml().ignore() || Boolean.getBoolean("SKIP_PARSE_PERSISTENCE_XML"); } /** diff --git a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/dev/HibernateOrmDevServicesProcessor.java b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/dev/HibernateOrmDevServicesProcessor.java index c47ba9e5666077..a6c15baddbba7d 100644 --- a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/dev/HibernateOrmDevServicesProcessor.java +++ b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/dev/HibernateOrmDevServicesProcessor.java @@ -36,9 +36,9 @@ void devServicesAutoGenerateByDefault(List s Set managedSources = schemaReadyBuildItems.stream().map(JdbcDataSourceSchemaReadyBuildItem::getDatasourceNames) .collect(HashSet::new, Collection::addAll, Collection::addAll); - for (Map.Entry entry : config.getAllPersistenceUnitConfigsAsMap() + for (Map.Entry entry : 
config.persistenceUnits() .entrySet()) { - Optional dataSourceName = entry.getValue().datasource; + Optional dataSourceName = entry.getValue().datasource(); List propertyKeysIndicatingDataSourceConfigured = DataSourceUtil .dataSourcePropertyKeys(dataSourceName.orElse(null), "username"); diff --git a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/metrics/HibernateOrmMetricsProcessor.java b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/metrics/HibernateOrmMetricsProcessor.java index 155c09b7e39573..d5567e1bca318c 100644 --- a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/metrics/HibernateOrmMetricsProcessor.java +++ b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/metrics/HibernateOrmMetricsProcessor.java @@ -33,7 +33,7 @@ public void metrics(HibernateOrmConfig config, // IF Hibernate metrics and Hibernate statistics are enabled // then define a consumer. 
It will only be invoked if metrics is enabled - if (config.metrics.enabled && config.statistics.orElse(true) && metricsConfiguration.isPresent()) { + if (config.metrics().enabled() && config.statistics().orElse(true) && metricsConfiguration.isPresent()) { datasourceMetrics.produce(new MetricsFactoryConsumerBuildItem(metricsRecorder.consumeMetricsFactory())); } } diff --git a/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-persistence-units.js b/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-persistence-units.js index f28762193739a8..0d6e245238ad13 100644 --- a/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-persistence-units.js +++ b/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-persistence-units.js @@ -5,16 +5,16 @@ import '@vaadin/button'; import '@vaadin/grid'; import { columnBodyRenderer } from '@vaadin/grid/lit.js'; import { notifier } from 'notifier'; +import { observeState } from 'lit-element-state'; +import { themeState } from 'theme-state'; +import '@quarkus-webcomponents/codeblock'; -export class HibernateOrmPersistenceUnitsComponent extends LitElement { +export class HibernateOrmPersistenceUnitsComponent extends observeState(LitElement) { static styles = css` .full-height { height: 100%; } - .ddl-script { - padding: 5px; - } a.script-heading { display: block; float:left; @@ -81,7 +81,11 @@ export class HibernateOrmPersistenceUnitsComponent extends LitElement { -

${pu.createDDL}
+ + @@ -96,7 +100,11 @@ export class HibernateOrmPersistenceUnitsComponent extends LitElement { -
${pu.updateDDL}
+ +
@@ -111,13 +119,17 @@ export class HibernateOrmPersistenceUnitsComponent extends LitElement { -
${pu.dropDDL}
+ +
`; } _copyToClipboard(event, what) { event.stopPropagation(); - var text = event.target.closest("vaadin-details").querySelector(".ddl-script").textContent; + var text = event.target.closest("vaadin-details").querySelector("qui-code-block").value; var listener = function(ev) { ev.clipboardData.setData("text/plain", text); ev.preventDefault(); diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsInterceptorTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsInterceptorTest.java index d5f73061f69ad9..74b7d037bf1c07 100644 --- a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsInterceptorTest.java +++ b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsInterceptorTest.java @@ -60,8 +60,12 @@ public void initData() throws Exception { transaction.begin(); DefaultEntity entity = new DefaultEntity("default"); defaultSession.persist(entity); + transaction.commit(); + transaction.begin(); User user = new User("user"); usersSession.persist(user); + transaction.commit(); + transaction.begin(); Plane plane = new Plane("plane"); inventorySession.persist(plane); transaction.commit(); diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsPackageAnnotationsTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsPackageAnnotationsTest.java index 86265c074ecc71..b73c006ce0e467 100644 --- a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsPackageAnnotationsTest.java +++ 
b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsPackageAnnotationsTest.java @@ -6,6 +6,7 @@ import jakarta.inject.Inject; import jakarta.persistence.EntityManager; import jakarta.transaction.Transactional; +import jakarta.transaction.UserTransaction; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; @@ -39,26 +40,34 @@ public class MultiplePersistenceUnitsPackageAnnotationsTest { @PersistenceUnit("inventory") EntityManager inventoryEntityManager; + @Inject + UserTransaction transaction; + @Test - @Transactional - public void testDefault() { + public void testDefault() throws Exception { + transaction.begin(); SharedEntity defaultEntity = new SharedEntity("default"); defaultEntityManager.persist(defaultEntity); SharedEntity savedDefaultEntity = defaultEntityManager.find(SharedEntity.class, defaultEntity.getId()); assertEquals(defaultEntity.getName(), savedDefaultEntity.getName()); + transaction.commit(); + transaction.begin(); SharedEntity defaultEntity2 = new SharedEntity("default2"); usersEntityManager.persist(defaultEntity2); SharedEntity savedDefaultEntity2 = usersEntityManager.find(SharedEntity.class, defaultEntity2.getId()); assertEquals(defaultEntity2.getName(), savedDefaultEntity2.getName()); + transaction.commit(); + transaction.begin(); SharedEntity defaultEntity3 = new SharedEntity("default3"); inventoryEntityManager.persist(defaultEntity3); SharedEntity savedDefaultEntity3 = inventoryEntityManager.find(SharedEntity.class, defaultEntity3.getId()); assertEquals(defaultEntity3.getName(), savedDefaultEntity3.getName()); + transaction.commit(); } @Test @@ -75,7 +84,7 @@ public void testUser() { OtherUserInSubPackage savedOtherUserInSubPackage = usersEntityManager.find(OtherUserInSubPackage.class, otherUserInSubPackage.getId()); - assertEquals(savedOtherUserInSubPackage.getName(), savedOtherUserInSubPackage.getName()); + 
assertEquals(otherUserInSubPackage.getName(), savedOtherUserInSubPackage.getName()); } @Test diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsResourceInjectionEntityManagerTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsResourceInjectionEntityManagerTest.java index d115d67dfb8424..52a5fafc14ea1f 100644 --- a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsResourceInjectionEntityManagerTest.java +++ b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsResourceInjectionEntityManagerTest.java @@ -58,7 +58,6 @@ public void testUserInInventoryEntityManager() { } @Test - @Transactional public void testAccessBothPersistenceUnits() { testUser(); testPlane(); diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsResourceInjectionSessionTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsResourceInjectionSessionTest.java index b13741e7a94904..1d5e0ab49a7dc0 100644 --- a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsResourceInjectionSessionTest.java +++ b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/MultiplePersistenceUnitsResourceInjectionSessionTest.java @@ -58,7 +58,6 @@ public void testUserInInventorySession() { } @Test - @Transactional public void testAccessBothPersistenceUnits() { testUser(); testPlane(); diff --git a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/FastBootHibernatePersistenceProvider.java 
b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/FastBootHibernatePersistenceProvider.java index f98ba87694899d..18fd28d86a15d2 100644 --- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/FastBootHibernatePersistenceProvider.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/FastBootHibernatePersistenceProvider.java @@ -147,8 +147,6 @@ private EntityManagerFactoryBuilder getEntityManagerFactoryBuilderOrNull(String throw new PersistenceException("No name provided and multiple persistence units found"); } - Map puConfigMap = hibernateOrmRuntimeConfig - .getAllPersistenceUnitConfigsAsMap(); for (RuntimePersistenceUnitDescriptor persistenceUnit : units) { log.debugf( "Checking persistence-unit [name=%s, explicit-provider=%s] against incoming persistence unit name [%s]", @@ -174,9 +172,8 @@ private EntityManagerFactoryBuilder getEntityManagerFactoryBuilderOrNull(String "Attempting to boot a blocking Hibernate ORM instance on a reactive RecordedState"); } final PrevalidatedQuarkusMetadata metadata = recordedState.getMetadata(); - var puConfig = puConfigMap.getOrDefault(persistenceUnit.getConfigurationName(), - new HibernateOrmRuntimeConfigPersistenceUnit()); - if (puConfig.active.isPresent() && !puConfig.active.get()) { + var puConfig = hibernateOrmRuntimeConfig.persistenceUnits().get(persistenceUnit.getConfigurationName()); + if (puConfig.active().isPresent() && !puConfig.active().get()) { throw new IllegalStateException( "Attempting to boot a deactivated Hibernate ORM persistence unit"); } @@ -228,7 +225,7 @@ private RuntimeSettings buildRuntimeSettings(String persistenceUnitName, Recorde // Allow detection of driver/database capabilities on runtime init (was disabled during static init) runtimeSettingsBuilder.put("hibernate.temp.use_jdbc_metadata_defaults", "true"); - if (!persistenceUnitConfig.unsupportedProperties.isEmpty()) { + if 
(!persistenceUnitConfig.unsupportedProperties().isEmpty()) { log.warnf("Persistence-unit [%s] sets unsupported properties." + " These properties may not work correctly, and even if they do," + " that may change when upgrading to a newer version of Quarkus (even just a micro/patch version)." @@ -237,9 +234,9 @@ private RuntimeSettings buildRuntimeSettings(String persistenceUnitName, Recorde + " and more importantly so that the configuration property is tested regularly." + " Unsupported properties being set: %s", persistenceUnitName, - persistenceUnitConfig.unsupportedProperties.keySet()); + persistenceUnitConfig.unsupportedProperties().keySet()); } - for (Map.Entry entry : persistenceUnitConfig.unsupportedProperties.entrySet()) { + for (Map.Entry entry : persistenceUnitConfig.unsupportedProperties().entrySet()) { var key = entry.getKey(); if (runtimeSettingsBuilder.get(key) != null) { log.warnf("Persistence-unit [%s] sets property '%s' to a custom value through '%s'," @@ -266,7 +263,7 @@ private RuntimeSettings buildRuntimeSettings(String persistenceUnitName, Recorde persistenceUnitName, HibernateOrmRuntimeConfig.puPropertyKey(persistenceUnitName, "database.orm-compatibility.version"), databaseOrmCompatibilityVersion.externalRepresentation, - persistenceUnitConfig.unsupportedProperties.keySet()); + persistenceUnitConfig.unsupportedProperties().keySet()); } for (Map.Entry entry : databaseOrmCompatibilitySettings.entrySet()) { var key = entry.getKey(); @@ -403,12 +400,12 @@ private static void injectRuntimeConfiguration(HibernateOrmRuntimeConfigPersiste Builder runtimeSettingsBuilder) { // Database runtimeSettingsBuilder.put(AvailableSettings.JAKARTA_HBM2DDL_DATABASE_ACTION, - persistenceUnitConfig.database.generation.generation); + persistenceUnitConfig.database().generation().generation()); runtimeSettingsBuilder.put(AvailableSettings.JAKARTA_HBM2DDL_CREATE_SCHEMAS, - String.valueOf(persistenceUnitConfig.database.generation.createSchemas)); + 
String.valueOf(persistenceUnitConfig.database().generation().createSchemas())); - if (persistenceUnitConfig.database.generation.haltOnError) { + if (persistenceUnitConfig.database().generation().haltOnError()) { runtimeSettingsBuilder.put(AvailableSettings.HBM2DDL_HALT_ON_ERROR, "true"); } @@ -416,41 +413,41 @@ private static void injectRuntimeConfiguration(HibernateOrmRuntimeConfigPersiste runtimeSettingsBuilder.put(AvailableSettings.HBM2DDL_SCRIPTS_CREATE_APPEND, "false"); runtimeSettingsBuilder.put(AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_ACTION, - persistenceUnitConfig.scripts.generation.generation); + persistenceUnitConfig.scripts().generation().generation()); - if (persistenceUnitConfig.scripts.generation.createTarget.isPresent()) { + if (persistenceUnitConfig.scripts().generation().createTarget().isPresent()) { runtimeSettingsBuilder.put(AvailableSettings.HBM2DDL_SCRIPTS_CREATE_TARGET, - persistenceUnitConfig.scripts.generation.createTarget.get()); + persistenceUnitConfig.scripts().generation().createTarget().get()); } - if (persistenceUnitConfig.scripts.generation.dropTarget.isPresent()) { + if (persistenceUnitConfig.scripts().generation().dropTarget().isPresent()) { runtimeSettingsBuilder.put(AvailableSettings.HBM2DDL_SCRIPTS_DROP_TARGET, - persistenceUnitConfig.scripts.generation.dropTarget.get()); + persistenceUnitConfig.scripts().generation().dropTarget().get()); } - persistenceUnitConfig.database.defaultCatalog.ifPresent( + persistenceUnitConfig.database().defaultCatalog().ifPresent( catalog -> runtimeSettingsBuilder.put(AvailableSettings.DEFAULT_CATALOG, catalog)); - persistenceUnitConfig.database.defaultSchema.ifPresent( + persistenceUnitConfig.database().defaultSchema().ifPresent( schema -> runtimeSettingsBuilder.put(AvailableSettings.DEFAULT_SCHEMA, schema)); // Logging - if (persistenceUnitConfig.log.sql) { + if (persistenceUnitConfig.log().sql()) { runtimeSettingsBuilder.put(AvailableSettings.SHOW_SQL, "true"); - if 
(persistenceUnitConfig.log.formatSql) { + if (persistenceUnitConfig.log().formatSql()) { runtimeSettingsBuilder.put(AvailableSettings.FORMAT_SQL, "true"); } } - if (persistenceUnitConfig.log.jdbcWarnings.isPresent()) { + if (persistenceUnitConfig.log().jdbcWarnings().isPresent()) { runtimeSettingsBuilder.put(AvailableSettings.LOG_JDBC_WARNINGS, - persistenceUnitConfig.log.jdbcWarnings.get().toString()); + persistenceUnitConfig.log().jdbcWarnings().get().toString()); } - if (persistenceUnitConfig.log.queriesSlowerThanMs.isPresent()) { + if (persistenceUnitConfig.log().queriesSlowerThanMs().isPresent()) { runtimeSettingsBuilder.put(AvailableSettings.LOG_SLOW_QUERY, - persistenceUnitConfig.log.queriesSlowerThanMs.get()); + persistenceUnitConfig.log().queriesSlowerThanMs().get()); } } diff --git a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmDisabledRecorder.java b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmDisabledRecorder.java index a60b25ffc12160..0ada9c5da2c2a0 100644 --- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmDisabledRecorder.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmDisabledRecorder.java @@ -9,9 +9,9 @@ public class HibernateOrmDisabledRecorder { public void checkNoExplicitActiveTrue(HibernateOrmRuntimeConfig runtimeConfig) { - for (var entry : runtimeConfig.getAllPersistenceUnitConfigsAsMap().entrySet()) { + for (var entry : runtimeConfig.persistenceUnits().entrySet()) { var config = entry.getValue(); - if (config.active.isPresent() && config.active.get()) { + if (config.active().isPresent() && config.active().get()) { var puName = entry.getKey(); String enabledPropertyKey = HibernateOrmRuntimeConfig.extensionPropertyKey("enabled"); String activePropertyKey = HibernateOrmRuntimeConfig.puPropertyKey(puName, "active"); diff --git 
a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmRuntimeConfig.java b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmRuntimeConfig.java index 5392edca301b13..a7410b5a49a95a 100644 --- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmRuntimeConfig.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmRuntimeConfig.java @@ -1,53 +1,36 @@ package io.quarkus.hibernate.orm.runtime; import java.util.Map; -import java.util.TreeMap; import io.quarkus.runtime.annotations.ConfigDocMapKey; -import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefaults; +import io.smallrye.config.WithParentName; +import io.smallrye.config.WithUnnamedKey; +@ConfigMapping(prefix = "quarkus.hibernate-orm") @ConfigRoot(phase = ConfigPhase.RUN_TIME) -public class HibernateOrmRuntimeConfig { +public interface HibernateOrmRuntimeConfig { /** - * Configuration for the default persistence unit. + * Configuration for persistence units. */ - @ConfigItem(name = ConfigItem.PARENT) - public HibernateOrmRuntimeConfigPersistenceUnit defaultPersistenceUnit; - - /** - * Additional named persistence units. 
- */ - @ConfigDocSection + @WithParentName + @WithUnnamedKey(PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME) + @WithDefaults @ConfigDocMapKey("persistence-unit-name") - @ConfigItem(name = ConfigItem.PARENT) - public Map persistenceUnits; + Map persistenceUnits(); - public Map getAllPersistenceUnitConfigsAsMap() { - Map map = new TreeMap<>(); - if (defaultPersistenceUnit != null) { - map.put(PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME, defaultPersistenceUnit); - } - map.putAll(persistenceUnits); - return map; - } - - public static String extensionPropertyKey(String radical) { + static String extensionPropertyKey(String radical) { return "quarkus.hibernate-orm." + radical; } - public static String puPropertyKey(String puName, String radical) { + static String puPropertyKey(String puName, String radical) { String prefix = PersistenceUnitUtil.isDefaultPersistenceUnit(puName) ? "quarkus.hibernate-orm." : "quarkus.hibernate-orm.\"" + puName + "\"."; return prefix + radical; } - - public boolean isAnyPropertySet() { - return defaultPersistenceUnit.isAnyPropertySet() || - !persistenceUnits.isEmpty(); - } } diff --git a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmRuntimeConfigPersistenceUnit.java b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmRuntimeConfigPersistenceUnit.java index a272afe7f6cac6..15ff20cea6f625 100644 --- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmRuntimeConfigPersistenceUnit.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmRuntimeConfigPersistenceUnit.java @@ -1,18 +1,19 @@ package io.quarkus.hibernate.orm.runtime; -import java.util.HashMap; import java.util.Map; import java.util.Optional; +import io.quarkus.runtime.annotations.ConfigDocDefault; import io.quarkus.runtime.annotations.ConfigDocMapKey; import 
io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; -import io.quarkus.runtime.annotations.ConvertWith; import io.quarkus.runtime.configuration.TrimmedStringConverter; +import io.smallrye.config.WithConverter; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithParentName; @ConfigGroup -public class HibernateOrmRuntimeConfigPersistenceUnit { +public interface HibernateOrmRuntimeConfigPersistenceUnit { /** * Whether this persistence unit should be active at runtime. @@ -28,29 +29,26 @@ public class HibernateOrmRuntimeConfigPersistenceUnit { * * @asciidoclet */ - @ConfigItem(defaultValueDocumentation = "'true' if Hibernate ORM is enabled; 'false' otherwise") - public Optional active = Optional.empty(); + @ConfigDocDefault("'true' if Hibernate ORM is enabled; 'false' otherwise") + Optional active(); /** * Database related configuration. */ - @ConfigItem @ConfigDocSection - public HibernateOrmConfigPersistenceUnitDatabase database = new HibernateOrmConfigPersistenceUnitDatabase(); + HibernateOrmConfigPersistenceUnitDatabase database(); /** * Database scripts related configuration. */ - @ConfigItem @ConfigDocSection - public HibernateOrmConfigPersistenceUnitScripts scripts = new HibernateOrmConfigPersistenceUnitScripts(); + HibernateOrmConfigPersistenceUnitScripts scripts(); /** * Logging configuration. */ - @ConfigItem @ConfigDocSection - public HibernateOrmConfigPersistenceUnitLog log = new HibernateOrmConfigPersistenceUnitLog(); + HibernateOrmConfigPersistenceUnitLog log(); /** * Properties that should be passed on directly to Hibernate ORM. 
@@ -72,63 +70,43 @@ public class HibernateOrmRuntimeConfigPersistenceUnit { * * @asciidoclet */ - @ConfigItem @ConfigDocMapKey("full-property-key") - public Map unsupportedProperties = new HashMap<>(); - - public boolean isAnyPropertySet() { - return database.isAnyPropertySet() || - scripts.isAnyPropertySet() || - log.isAnyPropertySet() || - !unsupportedProperties.isEmpty(); - } + Map unsupportedProperties(); @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitDatabase { + interface HibernateOrmConfigPersistenceUnitDatabase { /** * Schema generation configuration. */ - @ConfigItem - public HibernateOrmConfigPersistenceUnitDatabaseGeneration generation = new HibernateOrmConfigPersistenceUnitDatabaseGeneration(); + HibernateOrmConfigPersistenceUnitDatabaseGeneration generation(); /** * The default catalog to use for the database objects. */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional defaultCatalog = Optional.empty(); + @WithConverter(TrimmedStringConverter.class) + Optional defaultCatalog(); /** * The default schema to use for the database objects. */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional defaultSchema = Optional.empty(); - - public boolean isAnyPropertySet() { - return generation.isAnyPropertySet() - || defaultCatalog.isPresent() - || defaultSchema.isPresent(); - } + @WithConverter(TrimmedStringConverter.class) + Optional defaultSchema(); + } @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitScripts { + interface HibernateOrmConfigPersistenceUnitScripts { /** * Schema generation configuration. 
*/ - @ConfigItem - public HibernateOrmConfigPersistenceUnitScriptGeneration generation = new HibernateOrmConfigPersistenceUnitScriptGeneration(); + HibernateOrmConfigPersistenceUnitScriptGeneration generation(); - public boolean isAnyPropertySet() { - return generation.isAnyPropertySet(); - } } @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitDatabaseGeneration { + interface HibernateOrmConfigPersistenceUnitDatabaseGeneration { /** * Select whether the database schema is generated or not. @@ -140,94 +118,80 @@ public static class HibernateOrmConfigPersistenceUnitDatabaseGeneration { * * Accepted values: `none`, `create`, `drop-and-create`, `drop`, `update`, `validate`. */ - @ConfigItem(name = ConfigItem.PARENT, defaultValue = "none") - @ConvertWith(TrimmedStringConverter.class) - public String generation = "none"; + @WithParentName + @WithDefault("none") + @WithConverter(TrimmedStringConverter.class) + String generation(); /** * If Hibernate ORM should create the schemas automatically (for databases supporting them). */ - @ConfigItem - public boolean createSchemas = false; + @WithDefault("false") + boolean createSchemas(); /** * Whether we should stop on the first error when applying the schema. */ - @ConfigItem - public boolean haltOnError = false; - - public boolean isAnyPropertySet() { - return !"none".equals(generation) - || createSchemas - || haltOnError; - } + @WithDefault("false") + boolean haltOnError(); + } @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitScriptGeneration { + interface HibernateOrmConfigPersistenceUnitScriptGeneration { /** * Select whether the database schema DDL files are generated or not. * * Accepted values: `none`, `create`, `drop-and-create`, `drop`, `update`, `validate`. 
*/ - @ConfigItem(name = ConfigItem.PARENT, defaultValue = "none") - @ConvertWith(TrimmedStringConverter.class) - public String generation = "none"; + @WithParentName + @WithDefault("none") + @WithConverter(TrimmedStringConverter.class) + String generation(); /** * Filename or URL where the database create DDL file should be generated. */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional createTarget = Optional.empty(); + @WithConverter(TrimmedStringConverter.class) + Optional createTarget(); /** * Filename or URL where the database drop DDL file should be generated. */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional dropTarget = Optional.empty(); - - public boolean isAnyPropertySet() { - return !"none".equals(generation) - || createTarget.isPresent() - || dropTarget.isPresent(); - } + @WithConverter(TrimmedStringConverter.class) + Optional dropTarget(); + } @ConfigGroup - public static class HibernateOrmConfigPersistenceUnitLog { + interface HibernateOrmConfigPersistenceUnitLog { /** * Show SQL logs and format them nicely. *

* Setting it to true is obviously not recommended in production. */ - @ConfigItem - public boolean sql = false; + @WithDefault("false") + boolean sql(); /** * Format the SQL logs if SQL log is enabled */ - @ConfigItem(defaultValue = "true") - public boolean formatSql = true; + @WithDefault("true") + boolean formatSql(); /** * Whether JDBC warnings should be collected and logged. */ - @ConfigItem(defaultValueDocumentation = "depends on dialect") - public Optional jdbcWarnings = Optional.empty(); + @ConfigDocDefault("depends on dialect") + Optional jdbcWarnings(); /** * If set, Hibernate will log queries that took more than specified number of milliseconds to execute. */ - @ConfigItem - public Optional queriesSlowerThanMs = Optional.empty(); + Optional queriesSlowerThanMs(); - public boolean isAnyPropertySet() { - return sql || !formatSql || jdbcWarnings.isPresent() || queriesSlowerThanMs.isPresent(); - } } } diff --git a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/JPAConfig.java b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/JPAConfig.java index f7c05f7530e446..5a47be2751500e 100644 --- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/JPAConfig.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/JPAConfig.java @@ -29,13 +29,10 @@ public class JPAConfig { @Inject public JPAConfig(HibernateOrmRuntimeConfig hibernateOrmRuntimeConfig) { - Map puConfigMap = hibernateOrmRuntimeConfig - .getAllPersistenceUnitConfigsAsMap(); for (RuntimePersistenceUnitDescriptor descriptor : PersistenceUnitsHolder.getPersistenceUnitDescriptors()) { String puName = descriptor.getName(); - var puConfig = puConfigMap.getOrDefault(descriptor.getConfigurationName(), - new HibernateOrmRuntimeConfigPersistenceUnit()); - if (puConfig.active.isPresent() && !puConfig.active.get()) { + var puConfig = 
hibernateOrmRuntimeConfig.persistenceUnits().get(descriptor.getConfigurationName()); + if (puConfig.active().isPresent() && !puConfig.active().get()) { LOGGER.infof("Hibernate ORM persistence unit '%s' was deactivated through configuration properties", puName); deactivatedPersistenceUnitNames.add(puName); diff --git a/extensions/hibernate-reactive/deployment/src/main/java/io/quarkus/hibernate/reactive/deployment/HibernateReactiveEnabled.java b/extensions/hibernate-reactive/deployment/src/main/java/io/quarkus/hibernate/reactive/deployment/HibernateReactiveEnabled.java index b99f73e8ee4e8c..459f04197e42e9 100644 --- a/extensions/hibernate-reactive/deployment/src/main/java/io/quarkus/hibernate/reactive/deployment/HibernateReactiveEnabled.java +++ b/extensions/hibernate-reactive/deployment/src/main/java/io/quarkus/hibernate/reactive/deployment/HibernateReactiveEnabled.java @@ -24,7 +24,7 @@ public class HibernateReactiveEnabled implements BooleanSupplier { @Override public boolean getAsBoolean() { - return config.enabled; + return config.enabled(); } } diff --git a/extensions/hibernate-reactive/deployment/src/main/java/io/quarkus/hibernate/reactive/deployment/HibernateReactiveProcessor.java b/extensions/hibernate-reactive/deployment/src/main/java/io/quarkus/hibernate/reactive/deployment/HibernateReactiveProcessor.java index d8c2130bf24459..37303a0f579a1f 100644 --- a/extensions/hibernate-reactive/deployment/src/main/java/io/quarkus/hibernate/reactive/deployment/HibernateReactiveProcessor.java +++ b/extensions/hibernate-reactive/deployment/src/main/java/io/quarkus/hibernate/reactive/deployment/HibernateReactiveProcessor.java @@ -93,13 +93,19 @@ public final class HibernateReactiveProcessor { void registerBeans(BuildProducer additionalBeans, CombinedIndexBuildItem combinedIndex, List descriptors, JpaModelBuildItem jpaModel) { + if (!hasEntities(jpaModel)) { + LOG.warnf("Skip registration of the %s bean - no JPA entities were found", + 
ReactiveSessionFactoryProducer.class.getSimpleName()); + return; + } if (descriptors.size() == 1) { // Only register the bean if their EMF dependency is also available, so use the same guard as the ORM extension additionalBeans.produce(new AdditionalBeanBuildItem(ReactiveSessionFactoryProducer.class)); } else { - LOG.warnf( - "Skipping registration of %s bean because exactly one persistence unit is required for their registration", - ReactiveSessionFactoryProducer.class.getSimpleName()); + throw new ConfigurationException( + "The Hibernate Reactive extension requires exactly one persistence unit configured: " + descriptors + .stream() + .map(PersistenceUnitDescriptorBuildItem::getPersistenceUnitName).collect(Collectors.toList())); } } @@ -163,7 +169,7 @@ public void buildReactivePersistenceUnit( DataSourceBuildTimeConfig defaultDataSourceBuildTimeConfig = dataSourcesBuildTimeConfig.dataSources() .get(DataSourceUtil.DEFAULT_DATASOURCE_NAME); - Optional explicitDialect = hibernateOrmConfig.defaultPersistenceUnit.dialect.dialect; + Optional explicitDialect = hibernateOrmConfig.defaultPersistenceUnit().dialect().dialect(); Optional explicitDbMinVersion = defaultDataSourceBuildTimeConfig.dbVersion(); Optional dbKindOptional = DefaultDataSourceDbKindBuildItem.resolve( defaultDataSourceBuildTimeConfig.dbKind(), @@ -172,8 +178,13 @@ public void buildReactivePersistenceUnit( .orElse(!dataSourcesBuildTimeConfig.hasNamedDataSources()), curateOutcomeBuildItem); - if (dbKindOptional.isPresent()) { - HibernateOrmConfigPersistenceUnit persistenceUnitConfig = hibernateOrmConfig.defaultPersistenceUnit; + if (dbKindOptional.isEmpty()) { + throw new ConfigurationException( + "The default datasource must be configured for Hibernate Reactive. 
Refer to https://quarkus.io/guides/datasource for guidance.", + Set.of("quarkus.datasource.db-kind", "quarkus.datasource.username", + "quarkus.datasource.password")); + } else { + HibernateOrmConfigPersistenceUnit persistenceUnitConfig = hibernateOrmConfig.defaultPersistenceUnit(); ParsedPersistenceXmlDescriptor reactivePU = generateReactivePersistenceUnit( hibernateOrmConfig, index, persistenceUnitConfig, jpaModel, dbKindOptional, explicitDialect, explicitDbMinVersion, applicationArchivesBuildItem, @@ -189,8 +200,8 @@ public void buildReactivePersistenceUnit( new RecordedConfig(Optional.of(DataSourceUtil.DEFAULT_DATASOURCE_NAME), dbKindOptional, Optional.empty(), io.quarkus.hibernate.orm.runtime.migration.MultiTenancyStrategy.NONE, - hibernateOrmConfig.database.ormCompatibilityVersion, - persistenceUnitConfig.unsupportedProperties), + hibernateOrmConfig.database().ormCompatibilityVersion(), + persistenceUnitConfig.unsupportedProperties()), null, jpaModel.getXmlMappings(reactivePU.getName()), true, false, capabilities)); @@ -258,7 +269,7 @@ private static ParsedPersistenceXmlDescriptor generateReactivePersistenceUnit( desc.setTransactionType(PersistenceUnitTransactionType.RESOURCE_LOCAL); setDialectAndStorageEngine(dbKindOptional, explicitDialect, explicitDbMinVersion, dbKindDialectBuildItems, - persistenceUnitConfig.dialect.storageEngine, systemProperties, desc); + persistenceUnitConfig.dialect().storageEngine(), systemProperties, desc); desc.setExcludeUnlistedClasses(true); Map> modelClassesAndPackagesPerPersistencesUnits = HibernateOrmProcessor @@ -282,44 +293,45 @@ private static ParsedPersistenceXmlDescriptor generateReactivePersistenceUnit( } // Physical Naming Strategy - persistenceUnitConfig.physicalNamingStrategy.ifPresent( + persistenceUnitConfig.physicalNamingStrategy().ifPresent( namingStrategy -> desc.getProperties() .setProperty(AvailableSettings.PHYSICAL_NAMING_STRATEGY, namingStrategy)); // Implicit Naming Strategy - 
persistenceUnitConfig.implicitNamingStrategy.ifPresent( + persistenceUnitConfig.implicitNamingStrategy().ifPresent( namingStrategy -> desc.getProperties() .setProperty(AvailableSettings.IMPLICIT_NAMING_STRATEGY, namingStrategy)); // Mapping - if (persistenceUnitConfig.mapping.timezone.timeZoneDefaultStorage.isPresent()) { + if (persistenceUnitConfig.mapping().timezone().timeZoneDefaultStorage().isPresent()) { desc.getProperties().setProperty(AvailableSettings.TIMEZONE_DEFAULT_STORAGE, - persistenceUnitConfig.mapping.timezone.timeZoneDefaultStorage.get().name()); + persistenceUnitConfig.mapping().timezone().timeZoneDefaultStorage().get().name()); } desc.getProperties().setProperty(AvailableSettings.PREFERRED_POOLED_OPTIMIZER, - persistenceUnitConfig.mapping.id.optimizer.idOptimizerDefault + persistenceUnitConfig.mapping().id().optimizer().idOptimizerDefault() .orElse(HibernateOrmConfigPersistenceUnit.IdOptimizerType.POOLED_LO).configName); //charset desc.getProperties().setProperty(AvailableSettings.HBM2DDL_CHARSET_NAME, - persistenceUnitConfig.database.charset.name()); + persistenceUnitConfig.database().charset().name()); // Quoting strategy - if (persistenceUnitConfig.quoteIdentifiers.strategy == IdentifierQuotingStrategy.ALL - || persistenceUnitConfig.quoteIdentifiers.strategy == IdentifierQuotingStrategy.ALL_EXCEPT_COLUMN_DEFINITIONS - || persistenceUnitConfig.database.globallyQuotedIdentifiers) { + if (persistenceUnitConfig.quoteIdentifiers().strategy() == IdentifierQuotingStrategy.ALL + || persistenceUnitConfig.quoteIdentifiers() + .strategy() == IdentifierQuotingStrategy.ALL_EXCEPT_COLUMN_DEFINITIONS + || persistenceUnitConfig.database().globallyQuotedIdentifiers()) { desc.getProperties().setProperty(AvailableSettings.GLOBALLY_QUOTED_IDENTIFIERS, "true"); } - if (persistenceUnitConfig.quoteIdentifiers.strategy == IdentifierQuotingStrategy.ALL_EXCEPT_COLUMN_DEFINITIONS) { + if (persistenceUnitConfig.quoteIdentifiers().strategy() == 
IdentifierQuotingStrategy.ALL_EXCEPT_COLUMN_DEFINITIONS) { desc.getProperties().setProperty(AvailableSettings.GLOBALLY_QUOTED_IDENTIFIERS_SKIP_COLUMN_DEFINITIONS, "true"); - } else if (persistenceUnitConfig.quoteIdentifiers.strategy == IdentifierQuotingStrategy.ONLY_KEYWORDS) { + } else if (persistenceUnitConfig.quoteIdentifiers().strategy() == IdentifierQuotingStrategy.ONLY_KEYWORDS) { desc.getProperties().setProperty(AvailableSettings.KEYWORD_AUTO_QUOTING_ENABLED, "true"); } // Query // TODO ideally we should align on ORM and use 16 as a default, but that would break applications // because of https://github.com/hibernate/hibernate-reactive/issues/742 - int batchSize = firstPresent(persistenceUnitConfig.fetch.batchSize, persistenceUnitConfig.batchFetchSize) + int batchSize = firstPresent(persistenceUnitConfig.fetch().batchSize(), persistenceUnitConfig.batchFetchSize()) .orElse(-1); if (batchSize > 0) { desc.getProperties().setProperty(AvailableSettings.DEFAULT_BATCH_FETCH_SIZE, @@ -327,41 +339,41 @@ private static ParsedPersistenceXmlDescriptor generateReactivePersistenceUnit( desc.getProperties().setProperty(AvailableSettings.BATCH_FETCH_STYLE, BatchFetchStyle.PADDED.toString()); } - if (persistenceUnitConfig.fetch.maxDepth.isPresent()) { - setMaxFetchDepth(desc, persistenceUnitConfig.fetch.maxDepth); - } else if (persistenceUnitConfig.maxFetchDepth.isPresent()) { - setMaxFetchDepth(desc, persistenceUnitConfig.maxFetchDepth); + if (persistenceUnitConfig.fetch().maxDepth().isPresent()) { + setMaxFetchDepth(desc, persistenceUnitConfig.fetch().maxDepth()); + } else if (persistenceUnitConfig.maxFetchDepth().isPresent()) { + setMaxFetchDepth(desc, persistenceUnitConfig.maxFetchDepth()); } desc.getProperties().setProperty(AvailableSettings.QUERY_PLAN_CACHE_MAX_SIZE, Integer.toString( - persistenceUnitConfig.query.queryPlanCacheMaxSize)); + persistenceUnitConfig.query().queryPlanCacheMaxSize())); 
desc.getProperties().setProperty(AvailableSettings.DEFAULT_NULL_ORDERING, - persistenceUnitConfig.query.defaultNullOrdering.name().toLowerCase()); + persistenceUnitConfig.query().defaultNullOrdering().name().toLowerCase()); desc.getProperties().setProperty(AvailableSettings.IN_CLAUSE_PARAMETER_PADDING, - String.valueOf(persistenceUnitConfig.query.inClauseParameterPadding)); + String.valueOf(persistenceUnitConfig.query().inClauseParameterPadding())); // JDBC - persistenceUnitConfig.jdbc.timezone.ifPresent( + persistenceUnitConfig.jdbc().timezone().ifPresent( timezone -> desc.getProperties().setProperty(AvailableSettings.JDBC_TIME_ZONE, timezone)); - persistenceUnitConfig.jdbc.statementFetchSize.ifPresent( + persistenceUnitConfig.jdbc().statementFetchSize().ifPresent( fetchSize -> desc.getProperties().setProperty(AvailableSettings.STATEMENT_FETCH_SIZE, String.valueOf(fetchSize))); - persistenceUnitConfig.jdbc.statementBatchSize.ifPresent( + persistenceUnitConfig.jdbc().statementBatchSize().ifPresent( statementBatchSize -> desc.getProperties().setProperty(AvailableSettings.STATEMENT_BATCH_SIZE, String.valueOf(statementBatchSize))); // Statistics - if (hibernateOrmConfig.metrics.enabled - || (hibernateOrmConfig.statistics.isPresent() && hibernateOrmConfig.statistics.get())) { + if (hibernateOrmConfig.metrics().enabled() + || (hibernateOrmConfig.statistics().isPresent() && hibernateOrmConfig.statistics().get())) { desc.getProperties().setProperty(AvailableSettings.GENERATE_STATISTICS, "true"); } // sql-load-script - List importFiles = getSqlLoadScript(persistenceUnitConfig.sqlLoadScript, launchMode); + List importFiles = getSqlLoadScript(persistenceUnitConfig.sqlLoadScript(), launchMode); if (!importFiles.isEmpty()) { for (String importFile : importFiles) { @@ -371,19 +383,19 @@ private static ParsedPersistenceXmlDescriptor generateReactivePersistenceUnit( // enlist resource if present nativeImageResources.produce(new NativeImageResourceBuildItem(importFile)); 
hotDeploymentWatchedFiles.produce(new HotDeploymentWatchedFileBuildItem(importFile)); - } else if (persistenceUnitConfig.sqlLoadScript.isPresent()) { + } else if (persistenceUnitConfig.sqlLoadScript().isPresent()) { //raise exception if explicit file is not present (i.e. not the default) String propertyName = HibernateOrmRuntimeConfig.puPropertyKey(persistenceUnitConfigName, "sql-load-script"); throw new ConfigurationException( "Unable to find file referenced in '" + propertyName + "=" - + String.join(",", persistenceUnitConfig.sqlLoadScript.get()) + + String.join(",", persistenceUnitConfig.sqlLoadScript().get()) + "'. Remove property or add file to your path.", Collections.singleton(propertyName)); } } - if (persistenceUnitConfig.sqlLoadScript.isPresent()) { + if (persistenceUnitConfig.sqlLoadScript().isPresent()) { desc.getProperties().setProperty(AvailableSettings.HBM2DDL_IMPORT_FILES, String.join(",", importFiles)); } } else { @@ -392,7 +404,7 @@ private static ParsedPersistenceXmlDescriptor generateReactivePersistenceUnit( } // Caching - if (persistenceUnitConfig.secondLevelCachingEnabled) { + if (persistenceUnitConfig.secondLevelCachingEnabled()) { Properties p = desc.getProperties(); //Only set these if the user isn't making an explicit choice: p.putIfAbsent(USE_DIRECT_REFERENCE_CACHE_ENTRIES, Boolean.TRUE); diff --git a/extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/config/datasource/EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest.java b/extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/config/datasource/EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest.java index c4c11e2307d013..a70236bce167b0 100644 --- a/extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/config/datasource/EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest.java +++ 
b/extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/config/datasource/EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest.java @@ -1,9 +1,12 @@ package io.quarkus.hibernate.reactive.config.datasource; +import static org.assertj.core.api.Assertions.assertThat; + import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; import io.quarkus.hibernate.reactive.config.MyEntity; +import io.quarkus.runtime.configuration.ConfigurationException; import io.quarkus.test.QuarkusUnitTest; public class EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest { @@ -13,7 +16,11 @@ public class EntitiesInDefaultPUWithExplicitUnconfiguredDatasourceTest { .withApplicationRoot((jar) -> jar .addClass(MyEntity.class)) .overrideConfigKey("quarkus.hibernate-orm.datasource", "ds-1") - .overrideConfigKey("quarkus.hibernate-orm.database.generation", "drop-and-create"); + .overrideConfigKey("quarkus.hibernate-orm.database.generation", "drop-and-create") + .assertException(t -> assertThat(t) + .isInstanceOf(ConfigurationException.class) + .hasMessageContaining( + "The default datasource must be configured for Hibernate Reactive. 
Refer to https://quarkus.io/guides/datasource for guidance.")); @Test public void testInvalidConfiguration() { diff --git a/extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/config/datasource/EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest.java b/extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/config/datasource/EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest.java index 74f0f25029c80a..495a8b8dc4fc69 100644 --- a/extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/config/datasource/EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest.java +++ b/extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/config/datasource/EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest.java @@ -1,9 +1,12 @@ package io.quarkus.hibernate.reactive.config.datasource; +import static org.assertj.core.api.Assertions.assertThat; + import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; import io.quarkus.hibernate.reactive.config.MyEntity; +import io.quarkus.runtime.configuration.ConfigurationException; import io.quarkus.test.QuarkusUnitTest; public class EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest { @@ -13,7 +16,11 @@ public class EntitiesInDefaultPUWithImplicitUnconfiguredDatasourceTest { .withApplicationRoot((jar) -> jar .addClass(MyEntity.class)) // The datasource won't be truly "unconfigured" if dev services are enabled - .overrideConfigKey("quarkus.devservices.enabled", "false"); + .overrideConfigKey("quarkus.devservices.enabled", "false") + .assertException(t -> assertThat(t) + .isInstanceOf(ConfigurationException.class) + .hasMessageContaining( + "The default datasource must be configured for Hibernate Reactive. 
Refer to https://quarkus.io/guides/datasource for guidance.")); @Test public void testInvalidConfiguration() { diff --git a/extensions/hibernate-reactive/runtime/src/main/java/io/quarkus/hibernate/reactive/runtime/FastBootHibernateReactivePersistenceProvider.java b/extensions/hibernate-reactive/runtime/src/main/java/io/quarkus/hibernate/reactive/runtime/FastBootHibernateReactivePersistenceProvider.java index 5faa419621b96d..9fb08c246cd3fc 100644 --- a/extensions/hibernate-reactive/runtime/src/main/java/io/quarkus/hibernate/reactive/runtime/FastBootHibernateReactivePersistenceProvider.java +++ b/extensions/hibernate-reactive/runtime/src/main/java/io/quarkus/hibernate/reactive/runtime/FastBootHibernateReactivePersistenceProvider.java @@ -118,8 +118,6 @@ private EntityManagerFactoryBuilder getEntityManagerFactoryBuilderOrNull(String throw new PersistenceException("No name provided and multiple persistence units found"); } - Map puConfigMap = hibernateOrmRuntimeConfig - .getAllPersistenceUnitConfigsAsMap(); for (RuntimePersistenceUnitDescriptor persistenceUnit : units) { log.debugf( "Checking persistence-unit [name=%s, explicit-provider=%s] against incoming persistence unit name [%s]", @@ -146,9 +144,8 @@ private EntityManagerFactoryBuilder getEntityManagerFactoryBuilderOrNull(String RuntimeSettings.Builder runtimeSettingsBuilder = new RuntimeSettings.Builder(buildTimeSettings, integrationSettings); - var puConfig = puConfigMap.getOrDefault(persistenceUnit.getConfigurationName(), - new HibernateOrmRuntimeConfigPersistenceUnit()); - if (puConfig.active.isPresent() && !puConfig.active.get()) { + var puConfig = hibernateOrmRuntimeConfig.persistenceUnits().get(persistenceUnit.getConfigurationName()); + if (puConfig.active().isPresent() && !puConfig.active().get()) { throw new IllegalStateException( "Attempting to boot a deactivated Hibernate Reactive persistence unit"); } @@ -166,7 +163,7 @@ private EntityManagerFactoryBuilder getEntityManagerFactoryBuilderOrNull(String } 
} - if (!puConfig.unsupportedProperties.isEmpty()) { + if (!puConfig.unsupportedProperties().isEmpty()) { log.warnf("Persistence-unit [%s] sets unsupported properties." + " These properties may not work correctly, and even if they do," + " that may change when upgrading to a newer version of Quarkus (even just a micro/patch version)." @@ -175,9 +172,9 @@ private EntityManagerFactoryBuilder getEntityManagerFactoryBuilderOrNull(String + " and more importantly so that the configuration property is tested regularly." + " Unsupported properties being set: %s", persistenceUnitName, - puConfig.unsupportedProperties.keySet()); + puConfig.unsupportedProperties().keySet()); } - for (Map.Entry entry : puConfig.unsupportedProperties.entrySet()) { + for (Map.Entry entry : puConfig.unsupportedProperties().entrySet()) { var key = entry.getKey(); if (runtimeSettingsBuilder.get(key) != null) { log.warnf("Persistence-unit [%s] sets property '%s' to a custom value through '%s'," @@ -311,12 +308,12 @@ private static void injectRuntimeConfiguration(HibernateOrmRuntimeConfigPersiste Builder runtimeSettingsBuilder) { // Database runtimeSettingsBuilder.put(AvailableSettings.JAKARTA_HBM2DDL_DATABASE_ACTION, - persistenceUnitConfig.database.generation.generation); + persistenceUnitConfig.database().generation().generation()); runtimeSettingsBuilder.put(AvailableSettings.JAKARTA_HBM2DDL_CREATE_SCHEMAS, - String.valueOf(persistenceUnitConfig.database.generation.createSchemas)); + String.valueOf(persistenceUnitConfig.database().generation().createSchemas())); - if (persistenceUnitConfig.database.generation.haltOnError) { + if (persistenceUnitConfig.database().generation().haltOnError()) { runtimeSettingsBuilder.put(AvailableSettings.HBM2DDL_HALT_ON_ERROR, "true"); } @@ -324,41 +321,41 @@ private static void injectRuntimeConfiguration(HibernateOrmRuntimeConfigPersiste runtimeSettingsBuilder.put(AvailableSettings.HBM2DDL_SCRIPTS_CREATE_APPEND, "false"); 
runtimeSettingsBuilder.put(AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_ACTION, - persistenceUnitConfig.scripts.generation.generation); + persistenceUnitConfig.scripts().generation().generation()); - if (persistenceUnitConfig.scripts.generation.createTarget.isPresent()) { + if (persistenceUnitConfig.scripts().generation().createTarget().isPresent()) { runtimeSettingsBuilder.put(AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_CREATE_TARGET, - persistenceUnitConfig.scripts.generation.createTarget.get()); + persistenceUnitConfig.scripts().generation().createTarget().get()); } - if (persistenceUnitConfig.scripts.generation.dropTarget.isPresent()) { + if (persistenceUnitConfig.scripts().generation().dropTarget().isPresent()) { runtimeSettingsBuilder.put(AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_DROP_TARGET, - persistenceUnitConfig.scripts.generation.dropTarget.get()); + persistenceUnitConfig.scripts().generation().dropTarget().get()); } - persistenceUnitConfig.database.defaultCatalog.ifPresent( + persistenceUnitConfig.database().defaultCatalog().ifPresent( catalog -> runtimeSettingsBuilder.put(AvailableSettings.DEFAULT_CATALOG, catalog)); - persistenceUnitConfig.database.defaultSchema.ifPresent( + persistenceUnitConfig.database().defaultSchema().ifPresent( schema -> runtimeSettingsBuilder.put(AvailableSettings.DEFAULT_SCHEMA, schema)); // Logging - if (persistenceUnitConfig.log.sql) { + if (persistenceUnitConfig.log().sql()) { runtimeSettingsBuilder.put(AvailableSettings.SHOW_SQL, "true"); - if (persistenceUnitConfig.log.formatSql) { + if (persistenceUnitConfig.log().formatSql()) { runtimeSettingsBuilder.put(AvailableSettings.FORMAT_SQL, "true"); } } - if (persistenceUnitConfig.log.jdbcWarnings.isPresent()) { + if (persistenceUnitConfig.log().jdbcWarnings().isPresent()) { runtimeSettingsBuilder.put(AvailableSettings.LOG_JDBC_WARNINGS, - persistenceUnitConfig.log.jdbcWarnings.get().toString()); + persistenceUnitConfig.log().jdbcWarnings().get().toString()); } - if 
(persistenceUnitConfig.log.queriesSlowerThanMs.isPresent()) { + if (persistenceUnitConfig.log().queriesSlowerThanMs().isPresent()) { runtimeSettingsBuilder.put(AvailableSettings.LOG_SLOW_QUERY, - persistenceUnitConfig.log.queriesSlowerThanMs.get()); + persistenceUnitConfig.log().queriesSlowerThanMs().get()); } } diff --git a/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/resources/application-start-offline.properties b/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/resources/application-start-offline.properties index e83c72ada1bf42..8cadcad4a8d958 100644 --- a/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/resources/application-start-offline.properties +++ b/extensions/hibernate-search-orm-elasticsearch/deployment/src/test/resources/application-start-offline.properties @@ -3,7 +3,7 @@ quarkus.datasource.jdbc.url=jdbc:h2:mem:default;DB_CLOSE_DELAY=-1 quarkus.hibernate-orm.database.generation=drop-and-create -quarkus.hibernate-search-orm.elasticsearch.version=8.9 +quarkus.hibernate-search-orm.elasticsearch.version=8.12 # Simulate an offline Elasticsearch instance by pointing to a non-existing cluster quarkus.hibernate-search-orm.elasticsearch.hosts=localhost:14800 quarkus.hibernate-search-orm.schema-management.strategy=none diff --git a/extensions/hibernate-validator/runtime/src/main/java/io/quarkus/hibernate/validator/runtime/locale/AbstractLocaleResolver.java b/extensions/hibernate-validator/runtime/src/main/java/io/quarkus/hibernate/validator/runtime/locale/AbstractLocaleResolver.java index 2c5674b07e85e8..d5d2f70d344fbd 100644 --- a/extensions/hibernate-validator/runtime/src/main/java/io/quarkus/hibernate/validator/runtime/locale/AbstractLocaleResolver.java +++ b/extensions/hibernate-validator/runtime/src/main/java/io/quarkus/hibernate/validator/runtime/locale/AbstractLocaleResolver.java @@ -8,12 +8,17 @@ import org.hibernate.validator.spi.messageinterpolation.LocaleResolver; import 
org.hibernate.validator.spi.messageinterpolation.LocaleResolverContext; import org.jboss.logging.Logger; +import org.jboss.resteasy.reactive.common.util.CaseInsensitiveMap; abstract class AbstractLocaleResolver implements LocaleResolver { private static final Logger log = Logger.getLogger(AbstractLocaleResolver.class); private static final String ACCEPT_HEADER = "Accept-Language"; + /** + * @return case-insensitive map + * @see CaseInsensitiveMap + */ protected abstract Map> getHeaders(); @Override diff --git a/extensions/hibernate-validator/runtime/src/main/java/io/quarkus/hibernate/validator/runtime/locale/ResteasyReactiveLocaleResolver.java b/extensions/hibernate-validator/runtime/src/main/java/io/quarkus/hibernate/validator/runtime/locale/ResteasyReactiveLocaleResolver.java index ff3565546883c8..7756a2f5476268 100644 --- a/extensions/hibernate-validator/runtime/src/main/java/io/quarkus/hibernate/validator/runtime/locale/ResteasyReactiveLocaleResolver.java +++ b/extensions/hibernate-validator/runtime/src/main/java/io/quarkus/hibernate/validator/runtime/locale/ResteasyReactiveLocaleResolver.java @@ -1,12 +1,13 @@ package io.quarkus.hibernate.validator.runtime.locale; -import java.util.HashMap; import java.util.List; import java.util.Map; import jakarta.inject.Inject; import jakarta.inject.Singleton; +import org.jboss.resteasy.reactive.common.util.CaseInsensitiveMap; + import io.quarkus.arc.Arc; import io.quarkus.arc.ManagedContext; import io.quarkus.vertx.http.runtime.CurrentVertxRequest; @@ -31,7 +32,7 @@ protected Map> getHeaders() { } RoutingContext current = currentVertxRequest.getCurrent(); if (current != null) { - Map> result = new HashMap<>(); + Map> result = new CaseInsensitiveMap(); MultiMap headers = current.request().headers(); for (String name : headers.names()) { result.put(name, headers.getAll(name)); diff --git a/extensions/infinispan-client/deployment/src/main/java/io/quarkus/infinispan/client/deployment/InfinispanClientProcessor.java 
b/extensions/infinispan-client/deployment/src/main/java/io/quarkus/infinispan/client/deployment/InfinispanClientProcessor.java index 7e11774606f90d..279d2f08467ba6 100644 --- a/extensions/infinispan-client/deployment/src/main/java/io/quarkus/infinispan/client/deployment/InfinispanClientProcessor.java +++ b/extensions/infinispan-client/deployment/src/main/java/io/quarkus/infinispan/client/deployment/InfinispanClientProcessor.java @@ -47,7 +47,6 @@ import org.infinispan.protostream.FileDescriptorSource; import org.infinispan.protostream.GeneratedSchema; import org.infinispan.protostream.MessageMarshaller; -import org.infinispan.protostream.RawProtobufMarshaller; import org.infinispan.protostream.SerializationContextInitializer; import org.infinispan.protostream.WrappedMessage; import org.jboss.jandex.AnnotationInstance; @@ -142,20 +141,7 @@ NativeImageFeatureBuildItem nativeImageFeature() { @BuildStep public void handleProtoStreamRequirements(BuildProducer protostreamPropertiesBuildItem) throws ClassNotFoundException { - // We only apply this if we are in native mode in build time to apply to the properties - // Note that the other half is done in QuerySubstitutions.SubstituteMarshallerRegistration class - // Note that the registration of these files are done twice in normal VM mode - // (once during init and once at runtime) Properties properties = new Properties(); - try { - properties.put(PROTOBUF_FILE_PREFIX + WrappedMessage.PROTO_FILE, - getContents("/" + WrappedMessage.PROTO_FILE)); - String queryProtoFile = "org/infinispan/query/remote/client/query.proto"; - properties.put(PROTOBUF_FILE_PREFIX + queryProtoFile, getContents("/" + queryProtoFile)); - } catch (Exception ex) { - // Do nothing if fails - } - Map marshallers = new HashMap<>(); initMarshaller(InfinispanClientUtil.DEFAULT_INFINISPAN_CLIENT_NAME, infinispanClientsBuildTimeConfig.defaultInfinispanClient.marshallerClass, marshallers); @@ -205,6 +191,8 @@ InfinispanPropertiesBuildItem 
setup(ApplicationArchivesBuildItem applicationArch additionalBeans.produce(AdditionalBeanBuildItem.builder().addBeanClass(InfinispanClientName.class).build()); additionalBeans.produce(AdditionalBeanBuildItem.builder().addBeanClass(Remote.class).build()); + resourceBuildItem.produce(new NativeImageResourceBuildItem("proto/generated/query.proto")); + resourceBuildItem.produce(new NativeImageResourceBuildItem(WrappedMessage.PROTO_FILE)); hotDeployment .produce(new HotDeploymentWatchedFileBuildItem(META_INF + File.separator + DEFAULT_HOTROD_CLIENT_PROPERTIES)); @@ -460,7 +448,7 @@ private void addMaxEntries(String clientName, InfinispanClientBuildTimeConfig co @BuildStep UnremovableBeanBuildItem ensureBeanLookupAvailable() { return UnremovableBeanBuildItem.beanTypes(BaseMarshaller.class, EnumMarshaller.class, MessageMarshaller.class, - RawProtobufMarshaller.class, FileDescriptorSource.class); + FileDescriptorSource.class); } @BuildStep diff --git a/extensions/infinispan-client/runtime/pom.xml b/extensions/infinispan-client/runtime/pom.xml index a9ff61b97ae812..ad1a959e7ad57c 100644 --- a/extensions/infinispan-client/runtime/pom.xml +++ b/extensions/infinispan-client/runtime/pom.xml @@ -67,7 +67,7 @@ org.infinispan - infinispan-client-hotrod-jakarta + infinispan-client-hotrod org.infinispan diff --git a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/graal/QuerySubstitutions.java b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/graal/QuerySubstitutions.java deleted file mode 100644 index 1548c1945e48a4..00000000000000 --- a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/graal/QuerySubstitutions.java +++ /dev/null @@ -1,27 +0,0 @@ -package io.quarkus.infinispan.client.runtime.graal; - -import java.io.IOException; - -import org.infinispan.protostream.SerializationContext; -import org.infinispan.query.remote.client.impl.MarshallerRegistration; - -import 
com.oracle.svm.core.annotate.Substitute; -import com.oracle.svm.core.annotate.TargetClass; - -/** - * Class that has all the query substitutions necessary to remove code that is loaded when proto marshaller is in use - * - * @author William Burns - */ -final class QuerySubstitutions { -} - -@TargetClass(value = MarshallerRegistration.class) -final class SubstituteMarshallerRegistration { - @Substitute - public static void init(SerializationContext ctx) throws IOException { - // Skip loading the proto definition files as this was already done at compile time with - // HandleProtostreamMarshaller#handleQueryRequirements - MarshallerRegistration.INSTANCE.registerMarshallers(ctx); - } -} diff --git a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/graal/SubstituteExecutorFactoryConfigurationBuilder.java b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/graal/SubstituteExecutorFactoryConfigurationBuilder.java deleted file mode 100644 index fffc6ce238c30e..00000000000000 --- a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/graal/SubstituteExecutorFactoryConfigurationBuilder.java +++ /dev/null @@ -1,43 +0,0 @@ -package io.quarkus.infinispan.client.runtime.graal; - -import java.util.Properties; - -import org.infinispan.client.hotrod.configuration.ExecutorFactoryConfiguration; -import org.infinispan.client.hotrod.configuration.ExecutorFactoryConfigurationBuilder; -import org.infinispan.client.hotrod.impl.async.DefaultAsyncExecutorFactory; -import org.infinispan.commons.executors.ExecutorFactory; -import org.infinispan.commons.util.TypedProperties; - -import com.oracle.svm.core.annotate.Alias; -import com.oracle.svm.core.annotate.Substitute; -import com.oracle.svm.core.annotate.TargetClass; - -/** - * Avoids using reflection for DefaultAsyncExecutorFactory class - * - * @author William Burns - */ -@TargetClass(ExecutorFactoryConfigurationBuilder.class) 
-public final class SubstituteExecutorFactoryConfigurationBuilder { - @Alias - private ExecutorFactory factory; - @Alias - private Properties properties; - - @Substitute - public SubstituteExecutorFactoryConfiguration create() { - if (factory != null) - return new SubstituteExecutorFactoryConfiguration(factory, TypedProperties.toTypedProperties(properties)); - else - return new SubstituteExecutorFactoryConfiguration(new DefaultAsyncExecutorFactory(), - TypedProperties.toTypedProperties(properties)); - } -} - -@TargetClass(ExecutorFactoryConfiguration.class) -final class SubstituteExecutorFactoryConfiguration { - @Alias - SubstituteExecutorFactoryConfiguration(ExecutorFactory factory, TypedProperties properties) { - } - -} diff --git a/extensions/jdbc/jdbc-postgresql/deployment/src/main/java/io/quarkus/jdbc/postgresql/deployment/JDBCPostgreSQLProcessor.java b/extensions/jdbc/jdbc-postgresql/deployment/src/main/java/io/quarkus/jdbc/postgresql/deployment/JDBCPostgreSQLProcessor.java index db3c79aebfacdc..abc4f56e908937 100644 --- a/extensions/jdbc/jdbc-postgresql/deployment/src/main/java/io/quarkus/jdbc/postgresql/deployment/JDBCPostgreSQLProcessor.java +++ b/extensions/jdbc/jdbc-postgresql/deployment/src/main/java/io/quarkus/jdbc/postgresql/deployment/JDBCPostgreSQLProcessor.java @@ -14,6 +14,7 @@ import io.quarkus.deployment.builditem.FeatureBuildItem; import io.quarkus.deployment.builditem.NativeImageFeatureBuildItem; import io.quarkus.deployment.builditem.SslNativeConfigBuildItem; +import io.quarkus.deployment.builditem.nativeimage.RuntimeReinitializedClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.ServiceProviderBuildItem; import io.quarkus.deployment.pkg.steps.NativeOrNativeSourcesBuild; import io.quarkus.jdbc.postgresql.runtime.PostgreSQLAgroalConnectionConfigurer; @@ -32,6 +33,11 @@ NativeImageFeatureBuildItem nativeImageFeature() { return new NativeImageFeatureBuildItem(SQLXMLFeature.class); } + @BuildStep(onlyIf = 
NativeOrNativeSourcesBuild.class) + RuntimeReinitializedClassBuildItem runtimeReinitialize() { + return new RuntimeReinitializedClassBuildItem("org.postgresql.util.PasswordUtil$SecureRandomHolder"); + } + @BuildStep void registerDriver(BuildProducer jdbcDriver, SslNativeConfigBuildItem sslNativeConfigBuildItem) { diff --git a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaNativeContainer.java b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaNativeContainer.java index b6fff5fd04a9c4..2381592c4fd5b8 100644 --- a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaNativeContainer.java +++ b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaNativeContainer.java @@ -1,12 +1,10 @@ package io.quarkus.kafka.client.deployment; -import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import org.testcontainers.containers.GenericContainer; import org.testcontainers.containers.wait.strategy.Wait; -import org.testcontainers.images.builder.Transferable; import org.testcontainers.utility.DockerImageName; import com.github.dockerjava.api.command.InspectContainerResponse; @@ -59,10 +57,13 @@ protected void containerIsStarting(InspectContainerResponse containerInfo, boole cmd += " " + additionalArgs; } - //noinspection OctalInteger - copyFileToContainer( - Transferable.of(cmd.getBytes(StandardCharsets.UTF_8), 0777), - STARTER_SCRIPT); + // docker exec since docker cp doesn't work with kubedock yet + try { + execInContainer("sh", "-c", + String.format("echo -e \"%1$s\" >> %2$s && chmod 777 %2$s", cmd, STARTER_SCRIPT)); + } catch (Exception e) { + throw new RuntimeException("Can't create run script in the Kafka native container.", e); + } } private String getKafkaAdvertisedListeners() { diff --git 
a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaProcessor.java b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaProcessor.java index 3a574b4424f850..18549b1f92eb4f 100644 --- a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaProcessor.java +++ b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaProcessor.java @@ -415,6 +415,9 @@ public void withSasl(CombinedIndexBuildItem index, reflectiveClassCondition.produce(new ReflectiveClassConditionBuildItem( "org.apache.kafka.common.security.oauthbearer.secured.OAuthBearerValidatorCallbackHandler", "org.jose4j.keys.resolvers.VerificationKeyResolver")); + reflectiveClassCondition.produce(new ReflectiveClassConditionBuildItem( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerValidatorCallbackHandler", + "org.jose4j.keys.resolvers.VerificationKeyResolver")); } private void registerJDKLoginModules(BuildProducer reflectiveClass) { diff --git a/extensions/kafka-client/deployment/src/main/resources/dev-ui/qwc-kafka-messages.js b/extensions/kafka-client/deployment/src/main/resources/dev-ui/qwc-kafka-messages.js index 9ff8fcca678770..9164a203ee2231 100644 --- a/extensions/kafka-client/deployment/src/main/resources/dev-ui/qwc-kafka-messages.js +++ b/extensions/kafka-client/deployment/src/main/resources/dev-ui/qwc-kafka-messages.js @@ -7,14 +7,16 @@ import { columnBodyRenderer, gridRowDetailsRenderer } from '@vaadin/grid/lit.js' import '@vaadin/dialog'; import { dialogRenderer } from '@vaadin/dialog/lit.js'; import '@vaadin/button'; -import 'qui-code-block'; +import { observeState } from 'lit-element-state'; +import { themeState } from 'theme-state'; +import '@quarkus-webcomponents/codeblock'; import '@vaadin/split-layout'; import './qwc-kafka-add-message.js'; /** * This component shows the Kafka Messages for a certain topic */ -export class QwcKafkaMessages 
extends QwcHotReloadElement { +export class QwcKafkaMessages extends observeState(QwcHotReloadElement) { static styles = css` .kafka { @@ -171,7 +173,8 @@ export class QwcKafkaMessages extends QwcHotReloadElement {

+ content='${message.value}' + theme='${themeState.theme.name}'>
diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/KafkaTopicClient.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/KafkaTopicClient.java index 85d672c2ea45f1..2cfd5b990dd7f0 100644 --- a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/KafkaTopicClient.java +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/KafkaTopicClient.java @@ -249,8 +249,9 @@ private void assertRequestedPartitionsExist(String topicName, Collection(request.getTopic(), request.getPartition(), Bytes.wrap(request.getKey().getBytes()), - Bytes.wrap(request.getValue().getBytes())); + var record = new ProducerRecord<>(request.getTopic(), request.getPartition(), + Bytes.wrap(request.getKey().getBytes(StandardCharsets.UTF_8)), + Bytes.wrap(request.getValue().getBytes(StandardCharsets.UTF_8))); Optional.ofNullable(request.getHeaders()) .orElseGet(Collections::emptyMap) diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/model/converter/KafkaModelConverter.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/model/converter/KafkaModelConverter.java index 59101e9c11d9e6..2b14ee5c8483a5 100644 --- a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/model/converter/KafkaModelConverter.java +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/model/converter/KafkaModelConverter.java @@ -19,8 +19,12 @@ public KafkaMessage convert(ConsumerRecord message) { message.partition(), message.offset(), message.timestamp(), - Optional.ofNullable(message.key()).map(Bytes::toString).orElse(null), - Optional.ofNullable(message.value()).map(Bytes::toString).orElse(null), + Optional.ofNullable(message.key()).map((t) -> { + return new String(t.get(), StandardCharsets.UTF_8); + }).orElse(null), + 
Optional.ofNullable(message.value()).map((t) -> { + return new String(t.get(), StandardCharsets.UTF_8); + }).orElse(null), headers(message)); } diff --git a/extensions/kafka-streams/deployment/src/main/java/io/quarkus/kafka/streams/deployment/KafkaStreamsProcessor.java b/extensions/kafka-streams/deployment/src/main/java/io/quarkus/kafka/streams/deployment/KafkaStreamsProcessor.java index 112d70f346739e..5269d116c4bc73 100644 --- a/extensions/kafka-streams/deployment/src/main/java/io/quarkus/kafka/streams/deployment/KafkaStreamsProcessor.java +++ b/extensions/kafka-streams/deployment/src/main/java/io/quarkus/kafka/streams/deployment/KafkaStreamsProcessor.java @@ -96,6 +96,12 @@ private void registerCompulsoryClasses(BuildProducer r reflectiveClasses.produce(ReflectiveClassBuildItem .builder("org.apache.kafka.streams.processor.internals.StateDirectory$StateDirectoryProcessFile") .methods().fields().build()); + + // Listed in BuiltInDslStoreSuppliers + reflectiveClasses.produce(ReflectiveClassBuildItem + .builder(org.apache.kafka.streams.state.BuiltInDslStoreSuppliers.RocksDBDslStoreSuppliers.class, + org.apache.kafka.streams.state.BuiltInDslStoreSuppliers.InMemoryDslStoreSuppliers.class) + .build()); } private void registerClassesThatClientMaySpecify(BuildProducer reflectiveClasses, @@ -103,6 +109,17 @@ private void registerClassesThatClientMaySpecify(BuildProducer reflectiveClasses, + Properties kafkaStreamsProperties) { + String dlsStoreSupplierClassName = kafkaStreamsProperties + .getProperty(StreamsConfig.DSL_STORE_SUPPLIERS_CLASS_CONFIG); + + if (dlsStoreSupplierClassName != null) { + registerClassName(reflectiveClasses, dlsStoreSupplierClassName); + } } private void registerExceptionHandler(BuildProducer reflectiveClasses, diff --git a/extensions/kafka-streams/deployment/src/main/resources/dev-ui/qwc-kafka-streams-topology.js b/extensions/kafka-streams/deployment/src/main/resources/dev-ui/qwc-kafka-streams-topology.js index 98d40dd857ada0..f3f288eac6c513 100644 
--- a/extensions/kafka-streams/deployment/src/main/resources/dev-ui/qwc-kafka-streams-topology.js +++ b/extensions/kafka-streams/deployment/src/main/resources/dev-ui/qwc-kafka-streams-topology.js @@ -9,12 +9,14 @@ import '@vaadin/details'; import '@vaadin/tabs'; import '@vaadin/vertical-layout'; import 'qui-badge'; -import 'qui-code-block'; +import { observeState } from 'lit-element-state'; +import { themeState } from 'theme-state'; +import '@quarkus-webcomponents/codeblock'; /** * This component shows the Kafka Streams Topology */ -export class QwcKafkaStreamsTopology extends QwcHotReloadElement { +export class QwcKafkaStreamsTopology extends observeState(QwcHotReloadElement) { jsonRpc = new JsonRpc(this); @@ -132,15 +134,15 @@ export class QwcKafkaStreamsTopology extends QwcHotReloadElement { } _selectDescribeTab() { - this._tabContent = html``; + this._tabContent = html``; } _selectGraphvizTab() { - this._tabContent = html``; + this._tabContent = html``; } _selectMermaidTab() { - this._tabContent = html``; + this._tabContent = html``; } } customElements.define('qwc-kafka-streams-topology', QwcKafkaStreamsTopology); \ No newline at end of file diff --git a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsProducer.java b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsProducer.java index 4318cc16c35d2d..604017f8315911 100644 --- a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsProducer.java +++ b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsProducer.java @@ -15,13 +15,12 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.function.Function; import java.util.stream.Collectors; -import 
jakarta.annotation.PostConstruct; +import jakarta.enterprise.event.Event; import jakarta.enterprise.event.Observes; import jakarta.enterprise.inject.Instance; import jakarta.enterprise.inject.Produces; @@ -44,10 +43,9 @@ import org.eclipse.microprofile.config.ConfigProvider; import org.jboss.logging.Logger; -import io.quarkus.arc.Arc; import io.quarkus.arc.Unremovable; import io.quarkus.runtime.ShutdownEvent; -import io.quarkus.runtime.Startup; +import io.quarkus.runtime.StartupEvent; import io.smallrye.common.annotation.Identifier; /** @@ -64,12 +62,16 @@ public class KafkaStreamsProducer { private static volatile boolean shutdown = false; private final ExecutorService executorService; + private final StreamsConfig streamsConfig; private final KafkaStreams kafkaStreams; private final KafkaStreamsTopologyManager kafkaStreamsTopologyManager; private final Admin kafkaAdminClient; + private final Duration topicsTimeout; + private final List trimmedTopics; @Inject public KafkaStreamsProducer(KafkaStreamsSupport kafkaStreamsSupport, KafkaStreamsRuntimeConfig runtimeConfig, + ExecutorService executorService, Instance topology, Instance kafkaClientSupplier, @Identifier("default-kafka-broker") Instance> defaultConfiguration, Instance stateListener, Instance globalStateRestoreListener, @@ -79,9 +81,12 @@ public KafkaStreamsProducer(KafkaStreamsSupport kafkaStreamsSupport, KafkaStream if (topology.isUnsatisfied()) { LOGGER.warn("No Topology producer; Kafka Streams will not be started"); this.executorService = null; + this.streamsConfig = null; this.kafkaStreams = null; this.kafkaStreamsTopologyManager = null; this.kafkaAdminClient = null; + this.topicsTimeout = null; + this.trimmedTopics = null; return; } @@ -101,25 +106,43 @@ public KafkaStreamsProducer(KafkaStreamsSupport kafkaStreamsSupport, KafkaStream runtimeConfig); this.kafkaAdminClient = Admin.create(getAdminClientConfig(kafkaStreamsProperties)); - this.executorService = Executors.newSingleThreadExecutor(); + 
this.executorService = executorService; - this.kafkaStreams = initializeKafkaStreams(kafkaStreamsProperties, runtimeConfig, kafkaAdminClient, topology.get(), - kafkaClientSupplier, stateListener, globalStateRestoreListener, uncaughtExceptionHandlerListener, - executorService); + this.topicsTimeout = runtimeConfig.topicsTimeout; + this.trimmedTopics = isTopicsCheckEnabled() ? runtimeConfig.getTrimmedTopics() : Collections.emptyList(); + this.streamsConfig = new StreamsConfig(kafkaStreamsProperties); + this.kafkaStreams = initializeKafkaStreams(streamsConfig, topology.get(), + kafkaClientSupplier, stateListener, globalStateRestoreListener, uncaughtExceptionHandlerListener); this.kafkaStreamsTopologyManager = new KafkaStreamsTopologyManager(kafkaAdminClient); } - @PostConstruct - public void postConstruct() { + private boolean isTopicsCheckEnabled() { + return topicsTimeout.compareTo(Duration.ZERO) > 0; + } + + public void onStartup(@Observes StartupEvent event, Event kafkaStreamsEvent) { if (kafkaStreams != null) { - Arc.container().beanManager().getEvent().select(KafkaStreams.class).fire(kafkaStreams); + kafkaStreamsEvent.fire(kafkaStreams); + executorService.execute(() -> { + if (isTopicsCheckEnabled()) { + try { + waitForTopicsToBeCreated(kafkaAdminClient, trimmedTopics, topicsTimeout); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return; + } + } + if (!shutdown) { + LOGGER.debug("Starting Kafka Streams pipeline"); + kafkaStreams.start(); + } + }); } } @Produces @Singleton @Unremovable - @Startup public KafkaStreams getKafkaStreams() { return kafkaStreams; } @@ -127,7 +150,13 @@ public KafkaStreams getKafkaStreams() { @Produces @Singleton @Unremovable - @Startup + public StreamsConfig getStreamsConfig() { + return streamsConfig; + } + + @Produces + @Singleton + @Unremovable public KafkaStreamsTopologyManager kafkaStreamsTopologyManager() { return kafkaStreamsTopologyManager; } @@ -146,16 +175,15 @@ void onStop(@Observes 
ShutdownEvent event) { } } - private static KafkaStreams initializeKafkaStreams(Properties kafkaStreamsProperties, - KafkaStreamsRuntimeConfig runtimeConfig, Admin adminClient, Topology topology, + private static KafkaStreams initializeKafkaStreams(StreamsConfig streamsConfig, Topology topology, Instance kafkaClientSupplier, Instance stateListener, Instance globalStateRestoreListener, - Instance uncaughtExceptionHandlerListener, ExecutorService executorService) { + Instance uncaughtExceptionHandlerListener) { KafkaStreams kafkaStreams; if (kafkaClientSupplier.isUnsatisfied()) { - kafkaStreams = new KafkaStreams(topology, kafkaStreamsProperties); + kafkaStreams = new KafkaStreams(topology, streamsConfig); } else { - kafkaStreams = new KafkaStreams(topology, kafkaStreamsProperties, kafkaClientSupplier.get()); + kafkaStreams = new KafkaStreams(topology, streamsConfig, kafkaClientSupplier.get()); } if (!stateListener.isUnsatisfied()) { @@ -168,21 +196,6 @@ private static KafkaStreams initializeKafkaStreams(Properties kafkaStreamsProper kafkaStreams.setUncaughtExceptionHandler(uncaughtExceptionHandlerListener.get()); } - executorService.execute(() -> { - if (runtimeConfig.topicsTimeout.compareTo(Duration.ZERO) > 0) { - try { - waitForTopicsToBeCreated(adminClient, runtimeConfig.getTrimmedTopics(), runtimeConfig.topicsTimeout); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - return; - } - } - if (!shutdown) { - LOGGER.debug("Starting Kafka Streams pipeline"); - kafkaStreams.start(); - } - }); - return kafkaStreams; } diff --git a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsRuntimeConfig.java b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsRuntimeConfig.java index 7e4e6347650d9c..7a5b58d54a5802 100644 --- a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsRuntimeConfig.java +++ 
b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsRuntimeConfig.java @@ -16,7 +16,7 @@ public class KafkaStreamsRuntimeConfig { /** * Default Kafka bootstrap server. */ - public static final String DEFAULT_KAFKA_BROKER = "localhost:9012"; + public static final String DEFAULT_KAFKA_BROKER = "localhost:9092"; /** * A unique identifier for this Kafka Streams application. @@ -27,7 +27,7 @@ public class KafkaStreamsRuntimeConfig { /** * A comma-separated list of host:port pairs identifying the Kafka bootstrap server(s). - * If not set, fallback to {@code kafka.bootstrap.servers}, and if not set either use {@code localhost:9012}. + * If not set, fallback to {@code kafka.bootstrap.servers}, and if not set either use {@code localhost:9092}. */ @ConfigItem(defaultValue = DEFAULT_KAFKA_BROKER) public List bootstrapServers; diff --git a/extensions/keycloak-admin-client-reactive/runtime/src/main/java/io/quarkus/keycloak/admin/client/reactive/runtime/ResteasyReactiveClientProvider.java b/extensions/keycloak-admin-client-reactive/runtime/src/main/java/io/quarkus/keycloak/admin/client/reactive/runtime/ResteasyReactiveClientProvider.java index c39ffee71d45a3..a8004dd19c049b 100644 --- a/extensions/keycloak-admin-client-reactive/runtime/src/main/java/io/quarkus/keycloak/admin/client/reactive/runtime/ResteasyReactiveClientProvider.java +++ b/extensions/keycloak-admin-client-reactive/runtime/src/main/java/io/quarkus/keycloak/admin/client/reactive/runtime/ResteasyReactiveClientProvider.java @@ -28,7 +28,8 @@ public class ResteasyReactiveClientProvider implements ResteasyClientProvider { private static final List HANDLED_MEDIA_TYPES = List.of(MediaType.APPLICATION_JSON); - private static final int PROVIDER_PRIORITY = Priorities.USER + 100; // ensures that it will be used first + private static final int WRITER_PROVIDER_PRIORITY = Priorities.USER + 100; // ensures that it will be used first + private static final int READER_PROVIDER_PRIORITY = 
Priorities.USER - 100; // ensures that it will be used first private final boolean tlsTrustAll; @@ -77,9 +78,9 @@ private ClientBuilderImpl registerJacksonProviders(ClientBuilderImpl clientBuild clientBuilder = clientBuilder .registerMessageBodyReader(new JacksonBasicMessageBodyReader(newObjectMapper), Object.class, HANDLED_MEDIA_TYPES, true, - PROVIDER_PRIORITY) + READER_PROVIDER_PRIORITY) .registerMessageBodyWriter(new ClientJacksonMessageBodyWriter(newObjectMapper), Object.class, - HANDLED_MEDIA_TYPES, true, PROVIDER_PRIORITY); + HANDLED_MEDIA_TYPES, true, WRITER_PROVIDER_PRIORITY); } InstanceHandle clientLogger = arcContainer.instance(ClientLogger.class); if (clientLogger.isAvailable()) { diff --git a/extensions/keycloak-admin-client/deployment/src/main/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientProcessor.java b/extensions/keycloak-admin-client/deployment/src/main/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientProcessor.java index 5ac5f6fef32376..aa65258d906e45 100644 --- a/extensions/keycloak-admin-client/deployment/src/main/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientProcessor.java +++ b/extensions/keycloak-admin-client/deployment/src/main/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientProcessor.java @@ -13,6 +13,8 @@ import org.keycloak.json.StringOrArraySerializer; import io.quarkus.arc.deployment.SyntheticBeanBuildItem; +import io.quarkus.deployment.Capabilities; +import io.quarkus.deployment.Capability; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.annotations.ExecutionTime; @@ -49,8 +51,10 @@ ReflectiveClassBuildItem reflect() { @Record(ExecutionTime.STATIC_INIT) @Produce(ServiceStartBuildItem.class) @BuildStep - public void integrate(ResteasyKeycloakAdminClientRecorder recorder, TlsConfig tlsConfig) { - recorder.setClientProvider(tlsConfig.trustAll); + public void 
integrate(ResteasyKeycloakAdminClientRecorder recorder, TlsConfig tlsConfig, Capabilities capabilities) { + boolean areJSONBProvidersPresent = capabilities.isPresent(Capability.RESTEASY_JSON_JSONB) + || capabilities.isPresent(Capability.RESTEASY_JSON_JSONB_CLIENT); + recorder.setClientProvider(tlsConfig.trustAll, areJSONBProvidersPresent); } @Record(ExecutionTime.RUNTIME_INIT) diff --git a/extensions/keycloak-admin-client/runtime/src/main/java/io/quarkus/keycloak/adminclient/ResteasyKeycloakAdminClientRecorder.java b/extensions/keycloak-admin-client/runtime/src/main/java/io/quarkus/keycloak/adminclient/ResteasyKeycloakAdminClientRecorder.java index 75fb6d29248964..0ded9fe8b53073 100644 --- a/extensions/keycloak-admin-client/runtime/src/main/java/io/quarkus/keycloak/adminclient/ResteasyKeycloakAdminClientRecorder.java +++ b/extensions/keycloak-admin-client/runtime/src/main/java/io/quarkus/keycloak/adminclient/ResteasyKeycloakAdminClientRecorder.java @@ -6,7 +6,10 @@ import javax.net.ssl.SSLContext; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.Produces; import jakarta.ws.rs.client.Client; +import jakarta.ws.rs.core.MediaType; import org.keycloak.admin.client.ClientBuilderWrapper; import org.keycloak.admin.client.Keycloak; @@ -14,6 +17,7 @@ import org.keycloak.admin.client.spi.ResteasyClientClassicProvider; import io.quarkus.keycloak.admin.client.common.KeycloakAdminClientConfig; +import io.quarkus.resteasy.common.runtime.jackson.QuarkusJacksonSerializer; import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.annotations.Recorder; @@ -58,14 +62,25 @@ public Keycloak get() { }; } - public void setClientProvider(boolean tlsTrustAll) { + public void setClientProvider(boolean tlsTrustAll, boolean areJSONBProvidersPresent) { Keycloak.setClientProvider(new ResteasyClientClassicProvider() { @Override public Client newRestEasyClient(Object customJacksonProvider, SSLContext sslContext, boolean disableTrustManager) { // point here is to use default Quarkus 
providers rather than org.keycloak.admin.client.JacksonProvider // as it doesn't work properly in native mode - return ClientBuilderWrapper.create(sslContext, tlsTrustAll || disableTrustManager).build(); + var builder = ClientBuilderWrapper.create(sslContext, tlsTrustAll || disableTrustManager); + if (areJSONBProvidersPresent) { + // when both Jackson and JSONB providers are present, we need to ensure Jackson is used + builder.register(new AppJsonQuarkusJacksonSerializer(), 100); + } + return builder.build(); } }); } + + // makes media type more specific which ensures that it will be used first + @Produces(MediaType.APPLICATION_JSON) + @Consumes(MediaType.APPLICATION_JSON) + static class AppJsonQuarkusJacksonSerializer extends QuarkusJacksonSerializer { + } } diff --git a/extensions/kubernetes-client/deployment/src/main/java/io/quarkus/kubernetes/client/deployment/KubernetesClientProcessor.java b/extensions/kubernetes-client/deployment/src/main/java/io/quarkus/kubernetes/client/deployment/KubernetesClientProcessor.java index 6c3ddb8b118c36..67411a4d7fe8af 100644 --- a/extensions/kubernetes-client/deployment/src/main/java/io/quarkus/kubernetes/client/deployment/KubernetesClientProcessor.java +++ b/extensions/kubernetes-client/deployment/src/main/java/io/quarkus/kubernetes/client/deployment/KubernetesClientProcessor.java @@ -26,6 +26,7 @@ import io.fabric8.kubernetes.api.model.KubeSchema; import io.fabric8.kubernetes.api.model.KubernetesResource; import io.fabric8.kubernetes.api.model.KubernetesResourceList; +import io.fabric8.kubernetes.api.model.ValidationSchema; import io.fabric8.kubernetes.client.Config; import io.fabric8.kubernetes.client.CustomResource; import io.fabric8.kubernetes.client.DefaultKubernetesClient; @@ -67,6 +68,7 @@ public class KubernetesClientProcessor { private static final DotName RESOURCE_EVENT_HANDLER = DotName .createSimple(io.fabric8.kubernetes.client.informers.ResourceEventHandler.class.getName()); private static final DotName 
KUBERNETES_RESOURCE = DotName.createSimple(KubernetesResource.class.getName()); + private static final DotName VALIDATION_SCHEMA = DotName.createSimple(ValidationSchema.class.getName()); private static final DotName KUBERNETES_RESOURCE_LIST = DotName .createSimple(KubernetesResourceList.class.getName()); private static final DotName KUBE_SCHEMA = DotName.createSimple(KubeSchema.class.getName()); @@ -189,6 +191,7 @@ public void process(ApplicationIndexBuildItem applicationIndex, CombinedIndexBui ignoredJsonDeserializationClasses.produce(new IgnoreJsonDeserializeClassBuildItem(KUBE_SCHEMA)); ignoredJsonDeserializationClasses.produce(new IgnoreJsonDeserializeClassBuildItem(KUBERNETES_RESOURCE_LIST)); ignoredJsonDeserializationClasses.produce(new IgnoreJsonDeserializeClassBuildItem(KUBERNETES_RESOURCE)); + ignoredJsonDeserializationClasses.produce(new IgnoreJsonDeserializeClassBuildItem(VALIDATION_SCHEMA)); final String[] deserializerClasses = fullIndex .getAllKnownSubclasses(DotName.createSimple("com.fasterxml.jackson.databind.JsonDeserializer")) diff --git a/extensions/kubernetes/openshift/deployment/src/main/java/io/quarkus/openshift/deployment/OpenshiftProcessor.java b/extensions/kubernetes/openshift/deployment/src/main/java/io/quarkus/openshift/deployment/OpenshiftProcessor.java index 6460ca8c0feef8..9f6f1da037ebb0 100644 --- a/extensions/kubernetes/openshift/deployment/src/main/java/io/quarkus/openshift/deployment/OpenshiftProcessor.java +++ b/extensions/kubernetes/openshift/deployment/src/main/java/io/quarkus/openshift/deployment/OpenshiftProcessor.java @@ -6,8 +6,8 @@ import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.ApplicationInfoBuildItem; +import io.quarkus.kubernetes.deployment.DeploymentResourceKind; import io.quarkus.kubernetes.deployment.OpenshiftConfig; -import io.quarkus.kubernetes.deployment.OpenshiftConfig.DeploymentResourceKind; import 
io.quarkus.kubernetes.deployment.ResourceNameUtil; import io.quarkus.kubernetes.spi.KubernetesDeploymentTargetBuildItem; import io.quarkus.kubernetes.spi.KubernetesResourceMetadataBuildItem; @@ -22,13 +22,13 @@ public void checkOpenshift(ApplicationInfoBuildItem applicationInfo, Capabilitie DeploymentResourceKind deploymentResourceKind = config.getDeploymentResourceKind(capabilities); deploymentTargets .produce( - new KubernetesDeploymentTargetBuildItem(OPENSHIFT, deploymentResourceKind.kind, - deploymentResourceKind.apiGroup, - deploymentResourceKind.apiVersion, true, + new KubernetesDeploymentTargetBuildItem(OPENSHIFT, deploymentResourceKind.getKind(), + deploymentResourceKind.getGroup(), + deploymentResourceKind.getVersion(), true, config.getDeployStrategy())); String name = ResourceNameUtil.getResourceName(config, applicationInfo); - resourceMeta.produce(new KubernetesResourceMetadataBuildItem(OPENSHIFT, deploymentResourceKind.apiGroup, - deploymentResourceKind.apiVersion, deploymentResourceKind.kind, name)); + resourceMeta.produce(new KubernetesResourceMetadataBuildItem(OPENSHIFT, deploymentResourceKind.getGroup(), + deploymentResourceKind.getVersion(), deploymentResourceKind.getKind(), name)); } } diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/AddCronJobResourceDecorator.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/AddCronJobResourceDecorator.java index 907043428f78ce..c3bdf1f41ff855 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/AddCronJobResourceDecorator.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/AddCronJobResourceDecorator.java @@ -65,6 +65,8 @@ private void initCronJobResourceWithDefaults(CronJobBuilder builder) { // - match labels if (jobTemplateSpec.buildSelector().getMatchLabels() == null) { jobTemplateSpec.editSelector().withMatchLabels(new 
HashMap<>()).endSelector(); + } else { + jobTemplateSpec.withSelector(null); } // - termination grace period seconds if (jobTemplateSpec.buildTemplate().getSpec().getTerminationGracePeriodSeconds() == null) { diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/DeploymentResourceKind.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/DeploymentResourceKind.java new file mode 100644 index 00000000000000..7be2ade86c9eae --- /dev/null +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/DeploymentResourceKind.java @@ -0,0 +1,91 @@ +package io.quarkus.kubernetes.deployment; + +import static io.quarkus.kubernetes.deployment.Constants.BATCH_GROUP; +import static io.quarkus.kubernetes.deployment.Constants.BATCH_VERSION; +import static io.quarkus.kubernetes.deployment.Constants.CRONJOB; +import static io.quarkus.kubernetes.deployment.Constants.DEPLOYMENT; +import static io.quarkus.kubernetes.deployment.Constants.DEPLOYMENT_CONFIG; +import static io.quarkus.kubernetes.deployment.Constants.DEPLOYMENT_CONFIG_GROUP; +import static io.quarkus.kubernetes.deployment.Constants.DEPLOYMENT_CONFIG_VERSION; +import static io.quarkus.kubernetes.deployment.Constants.DEPLOYMENT_GROUP; +import static io.quarkus.kubernetes.deployment.Constants.DEPLOYMENT_VERSION; +import static io.quarkus.kubernetes.deployment.Constants.JOB; +import static io.quarkus.kubernetes.deployment.Constants.KNATIVE; +import static io.quarkus.kubernetes.deployment.Constants.KNATIVE_SERVICE; +import static io.quarkus.kubernetes.deployment.Constants.KNATIVE_SERVICE_GROUP; +import static io.quarkus.kubernetes.deployment.Constants.KNATIVE_SERVICE_VERSION; +import static io.quarkus.kubernetes.deployment.Constants.OPENSHIFT; +import static io.quarkus.kubernetes.deployment.Constants.STATEFULSET; + +import java.util.Set; + +import io.dekorate.utils.Strings; +import 
io.fabric8.kubernetes.api.model.HasMetadata; + +public enum DeploymentResourceKind { + + Deployment(DEPLOYMENT, DEPLOYMENT_GROUP, DEPLOYMENT_VERSION), + @Deprecated(since = "OpenShift 4.14") + DeploymentConfig(DEPLOYMENT_CONFIG, DEPLOYMENT_CONFIG_GROUP, DEPLOYMENT_CONFIG_VERSION, OPENSHIFT), + StatefulSet(STATEFULSET, DEPLOYMENT_GROUP, DEPLOYMENT_VERSION), + Job(JOB, BATCH_GROUP, BATCH_VERSION), + CronJob(CRONJOB, BATCH_GROUP, BATCH_VERSION), + KnativeService(KNATIVE_SERVICE, KNATIVE_SERVICE_GROUP, KNATIVE_SERVICE_VERSION, KNATIVE); + + private final String kind; + private final String group; + private final String version; + private final Set requiredTargets; + + DeploymentResourceKind(String kind, String group, String version, String... requiredTargets) { + this(kind, group, version, Set.of(requiredTargets)); + } + + DeploymentResourceKind(String kind, String group, String version, Set requiredTargets) { + this.kind = kind; + this.group = group; + this.version = version; + this.requiredTargets = requiredTargets; + } + + public static final DeploymentResourceKind find(String apiGroup, String apiVersion, String kind) { + for (DeploymentResourceKind deploymentResourceKind : DeploymentResourceKind.values()) { + if (deploymentResourceKind.kind.equals(kind) && deploymentResourceKind.group.equals(apiGroup) + && deploymentResourceKind.version.equals(apiVersion)) { + return deploymentResourceKind; + } + } + String apiGroupVersion = Strings.isNullOrEmpty(apiGroup) ? 
apiVersion : apiGroup + "/" + apiVersion; + throw new IllegalArgumentException("Could not find DeploymentResourceKind for " + apiGroupVersion + " " + kind); + } + + public boolean isAvailalbleOn(String target) { + return requiredTargets.isEmpty() || requiredTargets.contains(target); + } + + public boolean matches(HasMetadata resource) { + String resourceKind = HasMetadata.getKind(resource.getClass()); + String resourceVersion = HasMetadata.getApiVersion(resource.getClass()); + return resourceKind.equals(getKind()) && resourceVersion.equals(getApiVersion()); + } + + public String getKind() { + return kind; + } + + public String getGroup() { + return group; + } + + public String getVersion() { + return version; + } + + public Set getRequiredTargets() { + return requiredTargets; + } + + public String getApiVersion() { + return group + "/" + version; + } +} diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/DeploymentTargetEntry.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/DeploymentTargetEntry.java index 84ff998361e260..295bc87a77dfc0 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/DeploymentTargetEntry.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/DeploymentTargetEntry.java @@ -4,13 +4,13 @@ public class DeploymentTargetEntry { private final String name; - private final String kind; + private final DeploymentResourceKind deploymentResourceKind; private final int priority; private final DeployStrategy deployStrategy; - public DeploymentTargetEntry(String name, String kind, int priority, DeployStrategy deployStrategy) { + public DeploymentTargetEntry(String name, DeploymentResourceKind kind, int priority, DeployStrategy deployStrategy) { this.name = name; - this.kind = kind; + this.deploymentResourceKind = kind; this.priority = priority; this.deployStrategy = 
deployStrategy; } @@ -19,8 +19,8 @@ public String getName() { return name; } - public String getKind() { - return kind; + public DeploymentResourceKind getDeploymentResourceKind() { + return deploymentResourceKind; } public int getPriority() { diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesCommonHelper.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesCommonHelper.java index 429c201834fab7..7a4c4405ff3ff6 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesCommonHelper.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesCommonHelper.java @@ -75,6 +75,7 @@ import io.dekorate.project.Project; import io.dekorate.project.ScmInfo; import io.dekorate.utils.Annotations; +import io.dekorate.utils.Git; import io.dekorate.utils.Labels; import io.dekorate.utils.Strings; import io.fabric8.kubernetes.api.model.ContainerBuilder; @@ -989,7 +990,7 @@ private static List createAnnotationDecorators(Optional deploymentKind; + Optional deploymentKind; /** * The namespace the generated resources should belong to. 
@@ -625,13 +609,12 @@ public RbacConfig getRbacConfig() { return rbac; } - public KubernetesConfig.DeploymentResourceKind getDeploymentResourceKind(Capabilities capabilities) { + public DeploymentResourceKind getDeploymentResourceKind(Capabilities capabilities) { if (deploymentKind.isPresent()) { - return deploymentKind.get(); + return deploymentKind.filter(k -> k.isAvailalbleOn(KUBERNETES)).get(); } else if (capabilities.isPresent(Capability.PICOCLI)) { - return KubernetesConfig.DeploymentResourceKind.Job; + return DeploymentResourceKind.Job; } - return DeploymentResourceKind.Deployment; } diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesDeployer.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesDeployer.java index 8e82e3351b9a46..6c348b20180f1f 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesDeployer.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesDeployer.java @@ -56,6 +56,7 @@ import io.quarkus.kubernetes.spi.KubernetesDeploymentClusterBuildItem; import io.quarkus.kubernetes.spi.KubernetesOptionalResourceDefinitionBuildItem; import io.quarkus.kubernetes.spi.KubernetesOutputDirectoryBuildItem; +import io.quarkus.logging.Log; public class KubernetesDeployer { @@ -198,6 +199,19 @@ private DeploymentResultBuildItem deploy(DeploymentTargetEntry deploymentTarget, try (FileInputStream fis = new FileInputStream(manifest)) { KubernetesList list = Serialization.unmarshalAsList(fis); + + Optional conflictingResource = findConflictingResource(client, deploymentTarget, + list.getItems()); + if (conflictingResource.isPresent()) { + String messsage = "Skipping deployment of " + deploymentTarget.getDeploymentResourceKind() + " " + + conflictingResource.get().getMetadata().getName() + " because a " + + conflictingResource.get().getKind() + " 
with the same name exists."; + log.warn(messsage); + Log.warn("This may occur when switching deployment targets, or when the default deployment target is changed."); + Log.warn("Please remove conflicting resource and try again."); + throw new IllegalStateException(messsage); + } + list.getItems().stream().filter(distinctByResourceKey()).forEach(i -> { deployResource(deploymentTarget, client, i, optionalResourceDefinitions); log.info("Applied: " + i.getKind() + " " + i.getMetadata().getName() + "."); @@ -205,9 +219,11 @@ private DeploymentResultBuildItem deploy(DeploymentTargetEntry deploymentTarget, printExposeInformation(client, list, openshiftConfig, applicationInfo); - HasMetadata m = list.getItems().stream().filter(r -> r.getKind().equals(deploymentTarget.getKind())) + HasMetadata m = list.getItems().stream() + .filter(r -> deploymentTarget.getDeploymentResourceKind().matches(r)) .findFirst().orElseThrow(() -> new IllegalStateException( - "No " + deploymentTarget.getKind() + " found under: " + manifest.getAbsolutePath())); + "No " + deploymentTarget.getDeploymentResourceKind() + " found under: " + + manifest.getAbsolutePath())); return new DeploymentResultBuildItem(m.getMetadata().getName(), m.getMetadata().getLabels()); } catch (FileNotFoundException e) { throw new IllegalStateException("Can't find generated kubernetes manifest: " + manifest.getAbsolutePath()); @@ -255,6 +271,35 @@ private void deployResource(DeploymentTargetEntry deploymentTarget, KubernetesCl } } + private Optional findConflictingResource(KubernetesClient clinet, + DeploymentTargetEntry deploymentTarget, List generated) { + HasMetadata deploymentResource = generated.stream() + .filter(r -> deploymentTarget.getDeploymentResourceKind().matches(r)) + .findFirst() + .orElseThrow(() -> new IllegalStateException( + "No " + deploymentTarget.getDeploymentResourceKind() + " found under: " + deploymentTarget.getName())); + String name = deploymentResource.getMetadata().getName(); + + for 
(DeploymentResourceKind deploymentKind : DeploymentResourceKind.values()) { + if (deploymentKind.matches(deploymentResource)) { + continue; + } + try { + GenericKubernetesResource resource = clinet + .genericKubernetesResources(deploymentKind.getApiVersion(), deploymentKind.getKind()).withName(name) + .get(); + if (resource != null) { + Log.warn("Found conflicting resource:" + resource.getApiVersion() + "/" + resource.getKind() + ":" + + resource.getMetadata().getName()); + return Optional.of(resource); + } + } catch (KubernetesClientException e) { + // ignore + } + } + return Optional.empty(); + } + private void deleteResource(HasMetadata metadata, Resource r) { r.delete(); try { diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesProcessor.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesProcessor.java index 338948577a38ae..bb2fac1682e0fc 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesProcessor.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesProcessor.java @@ -79,9 +79,10 @@ public EnabledKubernetesDeploymentTargetsBuildItem enabledKubernetesDeploymentTa List entries = new ArrayList<>(mergedDeploymentTargets.size()); for (KubernetesDeploymentTargetBuildItem deploymentTarget : mergedDeploymentTargets) { if (deploymentTarget.isEnabled()) { - entries.add(new DeploymentTargetEntry(deploymentTarget.getName(), - deploymentTarget.getKind(), deploymentTarget.getPriority(), - deploymentTarget.getDeployStrategy())); + DeploymentResourceKind deploymentResourceKind = DeploymentResourceKind.find(deploymentTarget.getGroup(), + deploymentTarget.getVersion(), deploymentTarget.getKind()); + entries.add(new DeploymentTargetEntry(deploymentTarget.getName(), deploymentResourceKind, + deploymentTarget.getPriority(), 
deploymentTarget.getDeployStrategy())); } } return new EnabledKubernetesDeploymentTargetsBuildItem(entries); diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftConfig.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftConfig.java index 5d89ccba2a2d70..afdd1b33e9a3d5 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftConfig.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftConfig.java @@ -1,19 +1,8 @@ package io.quarkus.kubernetes.deployment; -import static io.quarkus.kubernetes.deployment.Constants.BATCH_GROUP; -import static io.quarkus.kubernetes.deployment.Constants.BATCH_VERSION; -import static io.quarkus.kubernetes.deployment.Constants.CRONJOB; -import static io.quarkus.kubernetes.deployment.Constants.DEPLOYMENT; -import static io.quarkus.kubernetes.deployment.Constants.DEPLOYMENT_CONFIG; -import static io.quarkus.kubernetes.deployment.Constants.DEPLOYMENT_CONFIG_GROUP; -import static io.quarkus.kubernetes.deployment.Constants.DEPLOYMENT_CONFIG_VERSION; -import static io.quarkus.kubernetes.deployment.Constants.DEPLOYMENT_GROUP; -import static io.quarkus.kubernetes.deployment.Constants.DEPLOYMENT_VERSION; -import static io.quarkus.kubernetes.deployment.Constants.JOB; import static io.quarkus.kubernetes.deployment.Constants.OPENSHIFT; import static io.quarkus.kubernetes.deployment.Constants.S2I; -import static io.quarkus.kubernetes.deployment.Constants.STATEFULSET; import java.util.Collections; import java.util.List; @@ -39,25 +28,6 @@ public static enum OpenshiftFlavor { v4; } - public static enum DeploymentResourceKind { - Deployment(DEPLOYMENT, DEPLOYMENT_GROUP, DEPLOYMENT_VERSION), - @Deprecated(since = "OpenShift 4.14") - DeploymentConfig(DEPLOYMENT_CONFIG, DEPLOYMENT_CONFIG_GROUP, DEPLOYMENT_CONFIG_VERSION), - 
StatefulSet(STATEFULSET, DEPLOYMENT_GROUP, DEPLOYMENT_VERSION), - Job(JOB, BATCH_GROUP, BATCH_VERSION), - CronJob(CRONJOB, BATCH_GROUP, BATCH_VERSION); - - public final String kind; - public final String apiGroup; - public final String apiVersion; - - DeploymentResourceKind(String kind, String apiGroup, String apiVersion) { - this.kind = kind; - this.apiGroup = apiGroup; - this.apiVersion = apiVersion; - } - } - /** * The OpenShift flavor / version to use. * Older versions of OpenShift have minor differences in the labels and fields they support. @@ -652,11 +622,10 @@ public static boolean isOpenshiftBuildEnabled(ContainerImageConfig containerImag public DeploymentResourceKind getDeploymentResourceKind(Capabilities capabilities) { if (deploymentKind.isPresent()) { - return deploymentKind.get(); + return deploymentKind.filter(k -> k.isAvailalbleOn(OPENSHIFT)).get(); } else if (capabilities.isPresent(Capability.PICOCLI)) { return DeploymentResourceKind.Job; } - return (flavor == OpenshiftFlavor.v3) ? 
DeploymentResourceKind.DeploymentConfig : DeploymentResourceKind.Deployment; } } diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftProcessor.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftProcessor.java index f49540aed39ef3..129735aeb5e88d 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftProcessor.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftProcessor.java @@ -56,7 +56,6 @@ import io.quarkus.deployment.pkg.PackageConfig; import io.quarkus.deployment.pkg.builditem.OutputTargetBuildItem; import io.quarkus.kubernetes.client.spi.KubernetesClientCapabilityBuildItem; -import io.quarkus.kubernetes.deployment.OpenshiftConfig.DeploymentResourceKind; import io.quarkus.kubernetes.spi.ConfiguratorBuildItem; import io.quarkus.kubernetes.spi.CustomProjectRootBuildItem; import io.quarkus.kubernetes.spi.DecoratorBuildItem; @@ -94,12 +93,13 @@ public void checkOpenshift(ApplicationInfoBuildItem applicationInfo, Capabilitie DeploymentResourceKind deploymentResourceKind = config.getDeploymentResourceKind(capabilities); deploymentTargets.produce( - new KubernetesDeploymentTargetBuildItem(OPENSHIFT, deploymentResourceKind.kind, deploymentResourceKind.apiGroup, - deploymentResourceKind.apiVersion, OPENSHIFT_PRIORITY, openshiftEnabled, config.deployStrategy)); + new KubernetesDeploymentTargetBuildItem(OPENSHIFT, deploymentResourceKind.getKind(), + deploymentResourceKind.getGroup(), + deploymentResourceKind.getVersion(), OPENSHIFT_PRIORITY, openshiftEnabled, config.deployStrategy)); if (openshiftEnabled) { String name = ResourceNameUtil.getResourceName(config, applicationInfo); - resourceMeta.produce(new KubernetesResourceMetadataBuildItem(OPENSHIFT, deploymentResourceKind.apiGroup, - deploymentResourceKind.apiVersion, deploymentResourceKind.kind, 
name)); + resourceMeta.produce(new KubernetesResourceMetadataBuildItem(OPENSHIFT, deploymentResourceKind.getGroup(), + deploymentResourceKind.getVersion(), deploymentResourceKind.getKind(), name)); } } diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/VanillaKubernetesProcessor.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/VanillaKubernetesProcessor.java index 04d6f0c5d1859d..a97429c6cf610c 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/VanillaKubernetesProcessor.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/VanillaKubernetesProcessor.java @@ -1,8 +1,6 @@ package io.quarkus.kubernetes.deployment; -import static io.quarkus.kubernetes.deployment.Constants.DEPLOYMENT_GROUP; -import static io.quarkus.kubernetes.deployment.Constants.DEPLOYMENT_VERSION; import static io.quarkus.kubernetes.deployment.Constants.INGRESS; import static io.quarkus.kubernetes.deployment.Constants.KUBERNETES; import static io.quarkus.kubernetes.deployment.Constants.LIVENESS_PROBE; @@ -76,22 +74,24 @@ public void checkVanillaKubernetes(ApplicationInfoBuildItem applicationInfo, Cap KubernetesConfig config, BuildProducer deploymentTargets, BuildProducer resourceMeta) { - String kind = config.getDeploymentResourceKind(capabilities).kind; + DeploymentResourceKind deploymentResourceKind = config.getDeploymentResourceKind(capabilities); List userSpecifiedDeploymentTargets = KubernetesConfigUtil.getConfiguredDeploymentTargets(); if (userSpecifiedDeploymentTargets.isEmpty() || userSpecifiedDeploymentTargets.contains(KUBERNETES)) { // when nothing was selected by the user, we enable vanilla Kubernetes by default - deploymentTargets.produce(new KubernetesDeploymentTargetBuildItem(KUBERNETES, kind, DEPLOYMENT_GROUP, - DEPLOYMENT_VERSION, VANILLA_KUBERNETES_PRIORITY, true, config.deployStrategy)); + 
deploymentTargets.produce(new KubernetesDeploymentTargetBuildItem(KUBERNETES, + deploymentResourceKind.getKind(), deploymentResourceKind.getGroup(), deploymentResourceKind.getVersion(), + VANILLA_KUBERNETES_PRIORITY, true, config.deployStrategy)); String name = ResourceNameUtil.getResourceName(config, applicationInfo); - resourceMeta.produce(new KubernetesResourceMetadataBuildItem(KUBERNETES, DEPLOYMENT_GROUP, DEPLOYMENT_VERSION, - kind, name)); + resourceMeta.produce(new KubernetesResourceMetadataBuildItem(KUBERNETES, deploymentResourceKind.getGroup(), + deploymentResourceKind.getVersion(), deploymentResourceKind.getKind(), name)); } else { deploymentTargets - .produce(new KubernetesDeploymentTargetBuildItem(KUBERNETES, kind, DEPLOYMENT_GROUP, - DEPLOYMENT_VERSION, VANILLA_KUBERNETES_PRIORITY, false, config.deployStrategy)); + .produce(new KubernetesDeploymentTargetBuildItem(KUBERNETES, deploymentResourceKind.getKind(), + deploymentResourceKind.getGroup(), + deploymentResourceKind.getVersion(), VANILLA_KUBERNETES_PRIORITY, false, config.deployStrategy)); } } @@ -166,16 +166,15 @@ public List createDecorators(ApplicationInfoBuildItem applic livenessPath, readinessPath, startupPath, roles, clusterRoles, serviceAccounts, roleBindings)); - KubernetesConfig.DeploymentResourceKind deploymentKind = config.getDeploymentResourceKind(capabilities); - if (deploymentKind != KubernetesConfig.DeploymentResourceKind.Deployment) { + DeploymentResourceKind deploymentKind = config.getDeploymentResourceKind(capabilities); + if (deploymentKind != DeploymentResourceKind.Deployment) { result.add(new DecoratorBuildItem(KUBERNETES, new RemoveDeploymentResourceDecorator(name))); } - - if (deploymentKind == KubernetesConfig.DeploymentResourceKind.StatefulSet) { + if (deploymentKind == DeploymentResourceKind.StatefulSet) { result.add(new DecoratorBuildItem(KUBERNETES, new AddStatefulSetResourceDecorator(name, config))); - } else if (deploymentKind == 
KubernetesConfig.DeploymentResourceKind.Job) { + } else if (deploymentKind == DeploymentResourceKind.Job) { result.add(new DecoratorBuildItem(KUBERNETES, new AddJobResourceDecorator(name, config.job))); - } else if (deploymentKind == KubernetesConfig.DeploymentResourceKind.CronJob) { + } else if (deploymentKind == DeploymentResourceKind.CronJob) { result.add(new DecoratorBuildItem(KUBERNETES, new AddCronJobResourceDecorator(name, config.cronJob))); } diff --git a/extensions/kubernetes/vanilla/deployment/src/main/resources/dev-ui/qwc-kubernetes-manifest.js b/extensions/kubernetes/vanilla/deployment/src/main/resources/dev-ui/qwc-kubernetes-manifest.js index 6c33ed70fb676e..145af77540418d 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/resources/dev-ui/qwc-kubernetes-manifest.js +++ b/extensions/kubernetes/vanilla/deployment/src/main/resources/dev-ui/qwc-kubernetes-manifest.js @@ -1,12 +1,14 @@ import { LitElement, html, css} from 'lit'; import { JsonRpc } from 'jsonrpc'; -import 'qui-code-block'; +import { observeState } from 'lit-element-state'; +import { themeState } from 'theme-state'; +import '@quarkus-webcomponents/codeblock'; import '@vaadin/icon'; import '@vaadin/tabs'; import '@vaadin/tabsheet'; import '@vaadin/progress-bar'; -export class QwcKubernetesManifest extends LitElement { +export class QwcKubernetesManifest extends observeState(LitElement) { jsonRpc = new JsonRpc(this); @@ -98,8 +100,9 @@ export class QwcKubernetesManifest extends LitElement { return html`
+ mode="yaml" + content="${yaml}" + theme="${themeState.theme.name}">
`; } diff --git a/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/AccessTokenInstanceBuildItem.java b/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/AccessTokenInstanceBuildItem.java new file mode 100644 index 00000000000000..91a19805c755bf --- /dev/null +++ b/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/AccessTokenInstanceBuildItem.java @@ -0,0 +1,39 @@ +package io.quarkus.oidc.client.deployment; + +import java.util.Objects; + +import org.jboss.jandex.AnnotationTarget; + +import io.quarkus.builder.item.MultiBuildItem; + +/** + * Represents one {@link io.quarkus.oidc.token.propagation.AccessToken} annotation instance. + */ +public final class AccessTokenInstanceBuildItem extends MultiBuildItem { + + private final String clientName; + private final boolean tokenExchange; + private final AnnotationTarget annotationTarget; + + AccessTokenInstanceBuildItem(String clientName, Boolean tokenExchange, AnnotationTarget annotationTarget) { + this.clientName = Objects.requireNonNull(clientName); + this.tokenExchange = tokenExchange; + this.annotationTarget = Objects.requireNonNull(annotationTarget); + } + + public String getClientName() { + return clientName; + } + + public boolean exchangeTokenActivated() { + return tokenExchange; + } + + public AnnotationTarget getAnnotationTarget() { + return annotationTarget; + } + + public String targetClass() { + return annotationTarget.asClass().name().toString(); + } +} diff --git a/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/AccessTokenRequestFilterGenerator.java b/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/AccessTokenRequestFilterGenerator.java new file mode 100644 index 00000000000000..d515193e36bf2e --- /dev/null +++ b/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/AccessTokenRequestFilterGenerator.java @@ -0,0 +1,95 
@@ +package io.quarkus.oidc.client.deployment; + +import java.lang.annotation.RetentionPolicy; +import java.lang.reflect.Modifier; +import java.util.HashMap; +import java.util.Map; + +import jakarta.annotation.Priority; +import jakarta.inject.Singleton; + +import io.quarkus.arc.deployment.GeneratedBeanBuildItem; +import io.quarkus.arc.deployment.GeneratedBeanGizmoAdaptor; +import io.quarkus.arc.deployment.UnremovableBeanBuildItem; +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; +import io.quarkus.gizmo.ClassCreator; + +public final class AccessTokenRequestFilterGenerator { + + private static final int AUTHENTICATION = 1000; + + private record ClientNameAndExchangeToken(String clientName, boolean exchangeTokenActivated) { + } + + private final BuildProducer unremovableBeansProducer; + private final BuildProducer reflectiveClassProducer; + private final BuildProducer generatedBeanProducer; + private final Class requestFilterClass; + private final Map cache = new HashMap<>(); + + public AccessTokenRequestFilterGenerator(BuildProducer unremovableBeansProducer, + BuildProducer reflectiveClassProducer, + BuildProducer generatedBeanProducer, Class requestFilterClass) { + this.unremovableBeansProducer = unremovableBeansProducer; + this.reflectiveClassProducer = reflectiveClassProducer; + this.generatedBeanProducer = generatedBeanProducer; + this.requestFilterClass = requestFilterClass; + } + + public String generateClass(AccessTokenInstanceBuildItem instance) { + return cache.computeIfAbsent( + new ClientNameAndExchangeToken(instance.getClientName(), instance.exchangeTokenActivated()), i -> { + var adaptor = new GeneratedBeanGizmoAdaptor(generatedBeanProducer); + String className = createUniqueClassName(i); + try (ClassCreator classCreator = ClassCreator.builder() + .className(className) + .superClass(requestFilterClass) + .classOutput(adaptor) + .build()) { + 
classCreator.addAnnotation(Priority.class).add("value", AUTHENTICATION); + classCreator.addAnnotation(Singleton.class); + + if (!i.clientName().isEmpty()) { + try (var methodCreator = classCreator.getMethodCreator("getClientName", String.class)) { + methodCreator.addAnnotation(Override.class.getName(), RetentionPolicy.CLASS); + methodCreator.setModifiers(Modifier.PROTECTED); + methodCreator.returnValue(methodCreator.load(i.clientName())); + } + } + if (i.exchangeTokenActivated()) { + try (var methodCreator = classCreator.getMethodCreator("isExchangeToken", boolean.class)) { + methodCreator.addAnnotation(Override.class.getName(), RetentionPolicy.CLASS); + methodCreator.setModifiers(Modifier.PROTECTED); + methodCreator.returnBoolean(true); + } + } + } + unremovableBeansProducer.produce(UnremovableBeanBuildItem.beanClassNames(className)); + reflectiveClassProducer + .produce(ReflectiveClassBuildItem.builder(className).methods().fields().constructors().build()); + return className; + }); + } + + private String createUniqueClassName(ClientNameAndExchangeToken i) { + return "%s_%sClient_%sTokenExchange".formatted(requestFilterClass.getName(), clientName(i.clientName()), + exchangeTokenName(i.exchangeTokenActivated())); + } + + private static String clientName(String clientName) { + if (clientName.isEmpty()) { + return "Default"; + } else { + return clientName; + } + } + + private static String exchangeTokenName(boolean enabled) { + if (enabled) { + return "Enabled"; + } else { + return "Default"; + } + } +} diff --git a/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/OidcClientBuildStep.java b/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/OidcClientBuildStep.java index 04ffea66bf3b8c..7019e54ec2c582 100644 --- a/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/OidcClientBuildStep.java +++ 
b/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/OidcClientBuildStep.java @@ -3,6 +3,7 @@ import static io.quarkus.oidc.client.deployment.OidcClientFilterDeploymentHelper.sanitize; import java.lang.reflect.Modifier; +import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Set; @@ -12,6 +13,7 @@ import jakarta.enterprise.context.RequestScoped; import jakarta.inject.Singleton; +import org.jboss.jandex.AnnotationInstance; import org.jboss.jandex.DotName; import io.quarkus.arc.BeanDestroyer; @@ -28,6 +30,7 @@ import io.quarkus.deployment.annotations.ExecutionTime; import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.ApplicationArchivesBuildItem; +import io.quarkus.deployment.builditem.CombinedIndexBuildItem; import io.quarkus.deployment.builditem.ExtensionSslNativeSupportBuildItem; import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedClassBuildItem; import io.quarkus.gizmo.ClassCreator; @@ -45,12 +48,15 @@ import io.quarkus.oidc.client.runtime.OidcClientsConfig; import io.quarkus.oidc.client.runtime.TokensHelper; import io.quarkus.oidc.client.runtime.TokensProducer; +import io.quarkus.oidc.token.propagation.AccessToken; import io.quarkus.runtime.TlsConfig; import io.quarkus.vertx.core.deployment.CoreVertxBuildItem; @BuildSteps(onlyIf = OidcClientBuildStep.IsEnabled.class) public class OidcClientBuildStep { + private static final DotName ACCESS_TOKEN = DotName.createSimple(AccessToken.class.getName()); + @BuildStep ExtensionSslNativeSupportBuildItem enableSslInNative() { return new ExtensionSslNativeSupportBuildItem(Feature.OIDC_CLIENT); @@ -149,6 +155,26 @@ public void createNonDefaultTokensProducers( } } + @BuildStep + public List collectAccessTokenInstances(CombinedIndexBuildItem index) { + record ItemBuilder(AnnotationInstance instance) { + + private String toClientName() { + var value = instance.value("exchangeTokenClient"); + return value == 
null || value.asString().equals("Default") ? "" : value.asString(); + } + + private boolean toExchangeToken() { + return instance.value("exchangeTokenClient") != null; + } + + private AccessTokenInstanceBuildItem build() { + return new AccessTokenInstanceBuildItem(toClientName(), toExchangeToken(), instance.target()); + } + } + return index.getIndex().getAnnotations(ACCESS_TOKEN).stream().map(ItemBuilder::new).map(ItemBuilder::build).toList(); + } + /** * Creates a Tokens producer class like follows: * diff --git a/extensions/oidc-client/deployment/src/test/java/io/quarkus/oidc/client/OidcClientPasswordGrantSecretIsMissingTestCase.java b/extensions/oidc-client/deployment/src/test/java/io/quarkus/oidc/client/OidcClientPasswordGrantSecretIsMissingTestCase.java new file mode 100644 index 00000000000000..0e1d80f22152cc --- /dev/null +++ b/extensions/oidc-client/deployment/src/test/java/io/quarkus/oidc/client/OidcClientPasswordGrantSecretIsMissingTestCase.java @@ -0,0 +1,48 @@ +package io.quarkus.oidc.client; + +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.runtime.configuration.ConfigurationException; +import io.quarkus.test.QuarkusUnitTest; + +public class OidcClientPasswordGrantSecretIsMissingTestCase { + + @RegisterExtension + static final QuarkusUnitTest test = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addAsResource(new StringAsset( + "quarkus.oidc-client.token-path=http://localhost:8180/oidc/tokens\n" + + "quarkus.oidc-client.client-id=quarkus\n" + + "quarkus.oidc-client.credentials.secret=secret\n" + + "quarkus.oidc-client.grant.type=password\n" + + "quarkus.oidc-client.grant-options.password.user=alice\n"), + "application.properties")) + .assertException(t -> { 
+ Throwable e = t; + ConfigurationException te = null; + while (e != null) { + if (e instanceof ConfigurationException) { + te = (ConfigurationException) e; + break; + } + e = e.getCause(); + } + assertNotNull(te); + assertTrue( + te.getMessage() + .contains("Username and password must be set when a password grant is used"), + te.getMessage()); + }); + + @Test + public void test() { + Assertions.fail(); + } + +} diff --git a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/OidcClientConfig.java b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/OidcClientConfig.java index 52807d9f59e875..ee44ed0479be4d 100644 --- a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/OidcClientConfig.java +++ b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/OidcClientConfig.java @@ -90,7 +90,13 @@ public static enum Type { * 'urn:openid:params:grant-type:ciba' grant requiring an OIDC client authentication as well as 'auth_req_id' * parameter which must be passed to OidcClient at the token request time. */ - CIBA("urn:openid:params:grant-type:ciba"); + CIBA("urn:openid:params:grant-type:ciba"), + /** + * 'urn:ietf:params:oauth:grant-type:device_code' grant requiring an OIDC client authentication as well as + * 'device_code' + * parameter which must be passed to OidcClient at the token request time. 
+ */ + DEVICE("urn:ietf:params:oauth:grant-type:device_code"); private String grantType; diff --git a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/AbstractTokensProducer.java b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/AbstractTokensProducer.java index 3ef2337efc7372..9697ec9d150dcc 100644 --- a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/AbstractTokensProducer.java +++ b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/AbstractTokensProducer.java @@ -1,5 +1,6 @@ package io.quarkus.oidc.client.runtime; +import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -45,7 +46,7 @@ public void init() { protected void initTokens() { if (earlyTokenAcquisition) { - tokensHelper.initTokens(oidcClient); + tokensHelper.initTokens(oidcClient, additionalParameters()); } } @@ -56,7 +57,7 @@ public Uni getTokens() { LOG.debugf("%s OidcClient will discard the current access and refresh tokens", clientId.orElse(DEFAULT_OIDC_CLIENT_ID)); } - return tokensHelper.getTokens(oidcClient, forceNewTokens); + return tokensHelper.getTokens(oidcClient, additionalParameters(), forceNewTokens); } public Tokens awaitTokens() { @@ -78,4 +79,11 @@ protected Optional clientId() { protected boolean isForceNewTokens() { return false; } + + /** + * @return Additional parameters which will be used during the token acquisition or refresh methods. 
+ */ + protected Map additionalParameters() { + return Map.of(); + } } diff --git a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/OidcClientImpl.java b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/OidcClientImpl.java index 8dcf143c6cadb3..3683eae39d305d 100644 --- a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/OidcClientImpl.java +++ b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/OidcClientImpl.java @@ -20,6 +20,7 @@ import io.quarkus.oidc.common.OidcEndpoint; import io.quarkus.oidc.common.OidcRequestContextProperties; import io.quarkus.oidc.common.OidcRequestFilter; +import io.quarkus.oidc.common.runtime.OidcCommonConfig.Credentials.Jwt.Source; import io.quarkus.oidc.common.runtime.OidcCommonUtils; import io.quarkus.oidc.common.runtime.OidcConstants; import io.smallrye.mutiny.Uni; @@ -47,6 +48,7 @@ public class OidcClientImpl implements OidcClient { private final String grantType; private final String clientSecretBasicAuthScheme; private final Key clientJwtKey; + private final boolean jwtBearerAuthentication; private final OidcClientConfig oidcConfig; private final Map> filters; private volatile boolean closed; @@ -63,7 +65,8 @@ public OidcClientImpl(WebClient client, String tokenRequestUri, String tokenRevo this.oidcConfig = oidcClientConfig; this.filters = filters; this.clientSecretBasicAuthScheme = OidcCommonUtils.initClientSecretBasicAuth(oidcClientConfig); - this.clientJwtKey = OidcCommonUtils.initClientJwtKey(oidcClientConfig); + this.jwtBearerAuthentication = oidcClientConfig.credentials.jwt.source == Source.BEARER; + this.clientJwtKey = jwtBearerAuthentication ? 
null : OidcCommonUtils.initClientJwtKey(oidcClientConfig); } @Override @@ -143,6 +146,15 @@ private UniOnItem> postRequest(OidcEndpoint.Type endpointTy } if (clientSecretBasicAuthScheme != null) { request.putHeader(AUTHORIZATION_HEADER, clientSecretBasicAuthScheme); + } else if (jwtBearerAuthentication) { + if (!additionalGrantParameters.containsKey(OidcConstants.CLIENT_ASSERTION)) { + String errorMessage = String.format( + "%s OidcClient can not complete the %s grant request because a JWT bearer client_assertion is missing", + oidcConfig.getId().get(), (refresh ? OidcConstants.REFRESH_TOKEN_GRANT : grantType)); + LOG.error(errorMessage); + throw new OidcClientException(errorMessage); + } + body.add(OidcConstants.CLIENT_ASSERTION_TYPE, OidcConstants.JWT_BEARER_CLIENT_ASSERTION_TYPE); } else if (clientJwtKey != null) { // if it is a refresh then a map has already been copied body = !refresh ? copyMultiMap(body) : body; diff --git a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/OidcClientRecorder.java b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/OidcClientRecorder.java index cff9f35a930cc8..101e88aab1ae8d 100644 --- a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/OidcClientRecorder.java +++ b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/OidcClientRecorder.java @@ -4,6 +4,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.Supplier; @@ -121,7 +122,8 @@ protected static Uni createOidcClientUni(OidcClientConfig oidcConfig OidcCommonUtils.setHttpClientOptions(oidcConfig, tlsConfig, options); - WebClient client = WebClient.create(new io.vertx.mutiny.core.Vertx(vertx.get()), options); + var mutinyVertx = new io.vertx.mutiny.core.Vertx(vertx.get()); + WebClient client = WebClient.create(mutinyVertx, 
options); Map> oidcRequestFilters = OidcCommonUtils.getOidcRequestFilters(); @@ -138,7 +140,8 @@ protected static Uni createOidcClientUni(OidcClientConfig oidcConfig OidcCommonUtils.getOidcEndpointUrl(authServerUriString, oidcConfig.tokenPath), OidcCommonUtils.getOidcEndpointUrl(authServerUriString, oidcConfig.revokePath))); } else { - tokenUrisUni = discoverTokenUris(client, oidcRequestFilters, authServerUriString.toString(), oidcConfig); + tokenUrisUni = discoverTokenUris(client, oidcRequestFilters, authServerUriString.toString(), oidcConfig, + mutinyVertx); } } return tokenUrisUni.onItemOrFailure() @@ -170,10 +173,16 @@ public OidcClient apply(OidcConfigurationMetadata metadata, Throwable t) { // Without this block `password` will be listed first, before `username` // which is not a technical problem but might affect Wiremock tests or the endpoints // which expect a specific order. - tokenGrantParams.add(OidcConstants.PASSWORD_GRANT_USERNAME, - grantOptions.get(OidcConstants.PASSWORD_GRANT_USERNAME)); - tokenGrantParams.add(OidcConstants.PASSWORD_GRANT_PASSWORD, - grantOptions.get(OidcConstants.PASSWORD_GRANT_PASSWORD)); + final String userName = grantOptions.get(OidcConstants.PASSWORD_GRANT_USERNAME); + final String userPassword = grantOptions.get(OidcConstants.PASSWORD_GRANT_PASSWORD); + if (userName == null || userPassword == null) { + throw new ConfigurationException( + "Username and password must be set when a password grant is used", + Set.of("quarkus.oidc-client.grant.type", + "quarkus.oidc-client.grant-options")); + } + tokenGrantParams.add(OidcConstants.PASSWORD_GRANT_USERNAME, userName); + tokenGrantParams.add(OidcConstants.PASSWORD_GRANT_PASSWORD, userPassword); for (Map.Entry entry : grantOptions.entrySet()) { if (!OidcConstants.PASSWORD_GRANT_USERNAME.equals(entry.getKey()) && !OidcConstants.PASSWORD_GRANT_PASSWORD.equals(entry.getKey())) { @@ -213,9 +222,11 @@ private static void setGrantClientParams(OidcClientConfig oidcConfig, MultiMap g private 
static Uni discoverTokenUris(WebClient client, Map> oidcRequestFilters, - String authServerUrl, OidcClientConfig oidcConfig) { + String authServerUrl, OidcClientConfig oidcConfig, io.vertx.mutiny.core.Vertx vertx) { final long connectionDelayInMillisecs = OidcCommonUtils.getConnectionDelayInMillis(oidcConfig); - return OidcCommonUtils.discoverMetadata(client, oidcRequestFilters, authServerUrl, connectionDelayInMillisecs) + return OidcCommonUtils + .discoverMetadata(client, oidcRequestFilters, authServerUrl, connectionDelayInMillisecs, vertx, + oidcConfig.useBlockingDnsLookup) .onItem().transform(json -> new OidcConfigurationMetadata(json.getString("token_endpoint"), json.getString("revocation_endpoint"))); } diff --git a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/TokensHelper.java b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/TokensHelper.java index 1e347ba9379184..4074c0b2f767b1 100644 --- a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/TokensHelper.java +++ b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/client/runtime/TokensHelper.java @@ -1,5 +1,6 @@ package io.quarkus.oidc.client.runtime; +import java.util.Map; import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; import java.util.function.BiConsumer; @@ -16,15 +17,20 @@ public class TokensHelper { .newUpdater(TokensHelper.class, TokenRequestState.class, "tokenRequestState"); public void initTokens(OidcClient oidcClient) { + initTokens(oidcClient, Map.of()); + } + + public void initTokens(OidcClient oidcClient, Map additionalParameters) { //init the tokens, this just happens in a blocking manner for now - tokenRequestStateUpdater.set(this, new TokenRequestState(oidcClient.getTokens().await().indefinitely())); + tokenRequestStateUpdater.set(this, + new TokenRequestState(oidcClient.getTokens(additionalParameters).await().indefinitely())); } public Uni getTokens(OidcClient oidcClient) { - return 
getTokens(oidcClient, false); + return getTokens(oidcClient, Map.of(), false); } - public Uni getTokens(OidcClient oidcClient, boolean forceNewTokens) { + public Uni getTokens(OidcClient oidcClient, Map additionalParameters, boolean forceNewTokens) { TokenRequestState currentState = null; TokenRequestState newState = null; //if the tokens are expired we refresh them in an async manner @@ -34,7 +40,7 @@ public Uni getTokens(OidcClient oidcClient, boolean forceNewTokens) { if (currentState == null) { //init the initial state //note that this can still happen at runtime as if there is an error then the state will be null - newState = new TokenRequestState(prepareUni(oidcClient.getTokens())); + newState = new TokenRequestState(prepareUni(oidcClient.getTokens(additionalParameters))); if (tokenRequestStateUpdater.compareAndSet(this, currentState, newState)) { return newState.tokenUni; } @@ -46,8 +52,8 @@ public Uni getTokens(OidcClient oidcClient, boolean forceNewTokens) { if (forceNewTokens || tokens.isAccessTokenExpired() || tokens.isAccessTokenWithinRefreshInterval()) { newState = new TokenRequestState( prepareUni((!forceNewTokens && tokens.getRefreshToken() != null && !tokens.isRefreshTokenExpired()) - ? oidcClient.refreshTokens(tokens.getRefreshToken()) - : oidcClient.getTokens())); + ? 
oidcClient.refreshTokens(tokens.getRefreshToken(), additionalParameters) + : oidcClient.getTokens(additionalParameters))); if (tokenRequestStateUpdater.compareAndSet(this, currentState, newState)) { return newState.tokenUni; } diff --git a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessToken.java b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessToken.java index 2debaf34ecd775..25965449f72908 100644 --- a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessToken.java +++ b/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessToken.java @@ -17,4 +17,13 @@ @Retention(RetentionPolicy.RUNTIME) @Documented public @interface AccessToken { + + /** + * Selects name of the configured OidcClient and activates token exchange for the annotated REST client. + * Please note that the default OidcClient's name is `Default`. You do not have to enable this attribute + * if you use the default OidcClient and already have either 'quarkus.oidc-token-propagation.exchange-token' + * or 'quarkus.oidc-token-propagation-reactive.exchange-token' property set to 'true' + */ + String exchangeTokenClient() default ""; + } diff --git a/extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/OidcEndpoint.java b/extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/OidcEndpoint.java index 2707f8f3bb09c9..362580ebf53237 100644 --- a/extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/OidcEndpoint.java +++ b/extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/OidcEndpoint.java @@ -46,7 +46,7 @@ enum Type { } /** - * Identifies an OIDC tenant to which a given feature applies. + * Identifies one or more OIDC endpoints. 
*/ - Type value() default Type.ALL; + Type[] value() default Type.ALL; } diff --git a/extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/runtime/OidcCommonConfig.java b/extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/runtime/OidcCommonConfig.java index b3c9f05ff21d47..2da30b8da5bf5e 100644 --- a/extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/runtime/OidcCommonConfig.java +++ b/extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/runtime/OidcCommonConfig.java @@ -74,6 +74,13 @@ public class OidcCommonConfig { @ConfigItem(defaultValue = "10s") public Duration connectionTimeout = Duration.ofSeconds(10); + /** + * Whether DNS lookup should be performed on the worker thread. + * Use this option when you can see logged warnings about blocked Vert.x event loop by HTTP requests to OIDC server. + */ + @ConfigItem(defaultValue = "false") + public boolean useBlockingDnsLookup; + /** * The maximum size of the connection pool used by the WebClient. */ @@ -178,7 +185,7 @@ public static enum Method { POST_JWT, /** - * client id and secret are submitted as HTTP query parameters. This option is only supported for the OIDC + * client id and secret are submitted as HTTP query parameters. This option is only supported by the OIDC * extension. */ QUERY @@ -232,12 +239,29 @@ public void setSecretProvider(Provider secretProvider) { /** * Supports the client authentication `client_secret_jwt` and `private_key_jwt` methods, which involves sending a JWT * token assertion signed with a client secret or private key. + * JWT Bearer client authentication is also supported. 
* * @see https://openid.net/specs/openid-connect-core-1_0.html#ClientAuthentication */ @ConfigGroup public static class Jwt { + + public static enum Source { + // JWT token is generated by the OIDC provider client to support + // `client_secret_jwt` and `private_key_jwt` authentication methods + CLIENT, + // JWT bearer token is used as a client assertion: https://www.rfc-editor.org/rfc/rfc7523#section-2.2 + // This option is only supported by the OIDC client extension. + BEARER + } + + /** + * JWT token source: OIDC provider client or an existing JWT bearer token. + */ + @ConfigItem(defaultValue = "client") + public Source source = Source.CLIENT; + /** * If provided, indicates that JWT is signed using a secret key. */ @@ -391,6 +415,14 @@ public void setClaims(Map claims) { this.claims = claims; } + public Source getSource() { + return source; + } + + public void setSource(Source source) { + this.source = source; + } + } /** diff --git a/extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/runtime/OidcCommonUtils.java b/extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/runtime/OidcCommonUtils.java index 92b12d8ed569e5..e4336d067be90d 100644 --- a/extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/runtime/OidcCommonUtils.java +++ b/extensions/oidc-common/runtime/src/main/java/io/quarkus/oidc/common/runtime/OidcCommonUtils.java @@ -4,8 +4,10 @@ import java.io.IOException; import java.io.InputStream; import java.net.ConnectException; +import java.net.InetAddress; import java.net.URI; import java.net.URLEncoder; +import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; @@ -20,6 +22,8 @@ import java.util.Map; import java.util.Optional; import java.util.OptionalInt; +import java.util.concurrent.Callable; +import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; import
java.util.stream.Collectors; @@ -30,6 +34,7 @@ import io.quarkus.arc.Arc; import io.quarkus.arc.ArcContainer; +import io.quarkus.arc.ClientProxy; import io.quarkus.credentials.CredentialsProvider; import io.quarkus.credentials.runtime.CredentialsProviderFinder; import io.quarkus.oidc.common.OidcEndpoint; @@ -53,8 +58,10 @@ import io.vertx.core.net.KeyStoreOptions; import io.vertx.core.net.ProxyOptions; import io.vertx.mutiny.core.MultiMap; +import io.vertx.mutiny.core.Vertx; import io.vertx.mutiny.core.buffer.Buffer; import io.vertx.mutiny.ext.web.client.HttpRequest; +import io.vertx.mutiny.ext.web.client.HttpResponse; import io.vertx.mutiny.ext.web.client.WebClient; public class OidcCommonUtils { @@ -432,7 +439,7 @@ public static Predicate oidcEndpointNotAvailable() { } public static Uni discoverMetadata(WebClient client, Map> filters, - String authServerUrl, long connectionDelayInMillisecs) { + String authServerUrl, long connectionDelayInMillisecs, Vertx vertx, boolean blockingDnsLookup) { final String discoveryUrl = getDiscoveryUri(authServerUrl); HttpRequest request = client.getAbs(discoveryUrl); if (!filters.isEmpty()) { @@ -442,11 +449,17 @@ public static Uni discoverMetadata(WebClient client, Map { + return sendRequest(vertx, request, blockingDnsLookup).onItem().transform(resp -> { if (resp.statusCode() == 200) { return resp.bodyAsJsonObject(); } else { - LOG.warnf("Discovery has failed, status code: %d", resp.statusCode()); + String errorMessage = resp.bodyAsString(); + if (errorMessage != null && !errorMessage.isEmpty()) { + LOG.warnf("Discovery request %s has failed, status code: %d, error message: %s", discoveryUrl, + resp.statusCode(), errorMessage); + } else { + LOG.warnf("Discovery request %s has failed, status code: %d", discoveryUrl, resp.statusCode()); + } throw new OidcEndpointAccessException(resp.statusCode()); } }).onFailure(oidcEndpointNotAvailable()) @@ -496,9 +509,14 @@ public static Map> getOidcRequestFilt Map> map = new HashMap<>(); for 
(OidcRequestFilter filter : container.listAll(OidcRequestFilter.class).stream().map(handle -> handle.get()) .collect(Collectors.toList())) { - OidcEndpoint endpoint = filter.getClass().getAnnotation(OidcEndpoint.class); - OidcEndpoint.Type type = endpoint != null ? endpoint.value() : OidcEndpoint.Type.ALL; - map.computeIfAbsent(type, k -> new ArrayList()).add(filter); + OidcEndpoint endpoint = ClientProxy.unwrap(filter).getClass().getAnnotation(OidcEndpoint.class); + if (endpoint != null) { + for (OidcEndpoint.Type type : endpoint.value()) { + map.computeIfAbsent(type, k -> new ArrayList()).add(filter); + } + } else { + map.computeIfAbsent(OidcEndpoint.Type.ALL, k -> new ArrayList()).add(filter); + } } return map; } @@ -524,4 +542,37 @@ public static List getMatchingOidcRequestFilters(Map> sendRequest(io.vertx.core.Vertx vertx, HttpRequest request, + boolean blockingDnsLookup) { + if (blockingDnsLookup) { + return sendRequest(new Vertx(vertx), request, true); + } else { + return request.send(); + } + } + + public static Uni> sendRequest(Vertx vertx, HttpRequest request, boolean blockingDnsLookup) { + if (blockingDnsLookup) { + return vertx.executeBlocking(new Callable() { + @Override + public Void call() { + try { + // cache DNS lookup + InetAddress.getByName(request.host()); + } catch (UnknownHostException e) { + throw new RuntimeException(e); + } + return null; + } + }).flatMap(new Function>>() { + @Override + public Uni> apply(Void unused) { + return request.send(); + } + }); + } else { + return request.send(); + } + } } diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveBuildStep.java b/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveBuildStep.java index e3862e6a1e077a..53cc81dbebe443 100644 --- 
a/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveBuildStep.java +++ b/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveBuildStep.java @@ -3,26 +3,29 @@ import static io.quarkus.oidc.token.propagation.TokenPropagationConstants.JWT_PROPAGATE_TOKEN_CREDENTIAL; import static io.quarkus.oidc.token.propagation.TokenPropagationConstants.OIDC_PROPAGATE_TOKEN_CREDENTIAL; -import java.util.Collection; import java.util.List; import java.util.function.BooleanSupplier; +import jakarta.ws.rs.Priorities; + import org.jboss.jandex.AnnotationInstance; import org.jboss.jandex.AnnotationValue; import org.jboss.jandex.DotName; import org.jboss.jandex.Type; import io.quarkus.arc.deployment.AdditionalBeanBuildItem; +import io.quarkus.arc.deployment.GeneratedBeanBuildItem; +import io.quarkus.arc.deployment.UnremovableBeanBuildItem; import io.quarkus.deployment.Capabilities; import io.quarkus.deployment.Capability; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.annotations.BuildSteps; import io.quarkus.deployment.builditem.AdditionalIndexedClassesBuildItem; -import io.quarkus.deployment.builditem.CombinedIndexBuildItem; import io.quarkus.deployment.builditem.SystemPropertyBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; -import io.quarkus.oidc.token.propagation.AccessToken; +import io.quarkus.oidc.client.deployment.AccessTokenInstanceBuildItem; +import io.quarkus.oidc.client.deployment.AccessTokenRequestFilterGenerator; import io.quarkus.rest.client.reactive.deployment.DotNames; import io.quarkus.rest.client.reactive.deployment.RegisterProviderAnnotationInstanceBuildItem; import io.quarkus.runtime.configuration.ConfigurationException; @@ -30,19 +33,25 @@ @BuildSteps(onlyIf = 
OidcTokenPropagationReactiveBuildStep.IsEnabled.class) public class OidcTokenPropagationReactiveBuildStep { - private static final DotName ACCESS_TOKEN = DotName.createSimple(AccessToken.class.getName()); - private static final DotName ACCESS_TOKEN_REQUEST_REACTIVE_FILTER = DotName - .createSimple(AccessTokenRequestReactiveFilter.class.getName()); - @BuildStep - void oidcClientFilterSupport(CombinedIndexBuildItem indexBuildItem, - BuildProducer producer) { - Collection instances = indexBuildItem.getIndex().getAnnotations(ACCESS_TOKEN); - for (AnnotationInstance instance : instances) { - String targetClass = instance.target().asClass().name().toString(); - producer.produce(new RegisterProviderAnnotationInstanceBuildItem(targetClass, AnnotationInstance.create( - DotNames.REGISTER_PROVIDER, instance.target(), List.of(AnnotationValue.createClassValue("value", - Type.create(ACCESS_TOKEN_REQUEST_REACTIVE_FILTER, org.jboss.jandex.Type.Kind.CLASS)))))); + void oidcClientFilterSupport(List accessTokenInstances, + BuildProducer unremovableBeans, + BuildProducer reflectiveClass, + BuildProducer generatedBean, + BuildProducer providerProducer) { + if (!accessTokenInstances.isEmpty()) { + var filterGenerator = new AccessTokenRequestFilterGenerator(unremovableBeans, reflectiveClass, generatedBean, + AccessTokenRequestReactiveFilter.class); + for (AccessTokenInstanceBuildItem instance : accessTokenInstances) { + String providerClass = filterGenerator.generateClass(instance); + providerProducer + .produce(new RegisterProviderAnnotationInstanceBuildItem(instance.targetClass(), + AnnotationInstance.create(DotNames.REGISTER_PROVIDER, instance.getAnnotationTarget(), List.of( + AnnotationValue.createClassValue("value", + Type.create(DotName.createSimple(providerClass), + org.jboss.jandex.Type.Kind.CLASS)), + AnnotationValue.createIntegerValue("priority", Priorities.AUTHENTICATION))))); + } } } @@ -55,7 +64,6 @@ void registerProvider(BuildProducer additionalBeans, 
ReflectiveClassBuildItem.builder(AccessTokenRequestReactiveFilter.class).methods().fields().build()); additionalIndexedClassesBuildItem .produce(new AdditionalIndexedClassesBuildItem(AccessTokenRequestReactiveFilter.class.getName())); - } @BuildStep(onlyIf = IsEnabledDuringAuth.class) diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenAnnotationTest.java b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenAnnotationTest.java new file mode 100644 index 00000000000000..e16074e7252527 --- /dev/null +++ b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenAnnotationTest.java @@ -0,0 +1,178 @@ +package io.quarkus.oidc.token.propagation.reactive; + +import static org.hamcrest.Matchers.equalTo; + +import jakarta.annotation.security.RolesAllowed; +import jakarta.inject.Inject; +import jakarta.inject.Singleton; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.QueryParam; + +import org.eclipse.microprofile.jwt.JsonWebToken; +import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; +import org.eclipse.microprofile.rest.client.inject.RestClient; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.Unremovable; +import io.quarkus.oidc.token.propagation.AccessToken; +import io.quarkus.test.QuarkusUnitTest; +import io.quarkus.test.common.QuarkusTestResource; +import io.quarkus.test.oidc.client.OidcTestClient; +import io.quarkus.test.oidc.server.OidcWiremockTestResource; +import io.restassured.RestAssured; + +@QuarkusTestResource(OidcWiremockTestResource.class) +public class AccessTokenAnnotationTest { + + final static OidcTestClient client = new 
OidcTestClient(); + + @RegisterExtension + static final QuarkusUnitTest test = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addClasses(DefaultClientDefaultExchange.class, DefaultClientEnabledExchange.class, + NamedClientDefaultExchange.class, MultiProviderFrontendResource.class, ProtectedResource.class, + CustomAccessTokenRequestFilter.class) + .addAsResource( + new StringAsset( + """ + quarkus.oidc.auth-server-url=${keycloak.url}/realms/quarkus + quarkus.oidc.client-id=quarkus-app + quarkus.oidc.credentials.secret=secret + + quarkus.oidc-client.auth-server-url=${quarkus.oidc.auth-server-url} + quarkus.oidc-client.client-id=${quarkus.oidc.client-id} + quarkus.oidc-client.credentials.client-secret.value=${quarkus.oidc.credentials.secret} + quarkus.oidc-client.credentials.client-secret.method=post + quarkus.oidc-client.grant.type=jwt + quarkus.oidc-client.scopes=https://graph.microsoft.com/user.read,offline_access + quarkus.oidc-client.grant-options.jwt.requested_token_use=on_behalf_of + quarkus.oidc-client.token-path=${keycloak.url}/realms/quarkus/jwt-bearer-token + + quarkus.oidc-client.named.auth-server-url=${quarkus.oidc-client.auth-server-url} + quarkus.oidc-client.named.client-id=${quarkus.oidc-client.client-id} + quarkus.oidc-client.named.credentials.client-secret.value=${quarkus.oidc-client.credentials.client-secret.value} + quarkus.oidc-client.named.credentials.client-secret.method=${quarkus.oidc-client.credentials.client-secret.method} + quarkus.oidc-client.named.grant.type=${quarkus.oidc-client.grant.type} + quarkus.oidc-client.named.scopes=${quarkus.oidc-client.scopes} + quarkus.oidc-client.named.grant-options.jwt.requested_token_use=${quarkus.oidc-client.grant-options.jwt.requested_token_use} + quarkus.oidc-client.named.token-path=${quarkus.oidc-client.token-path} + """), + "application.properties")); + + @AfterAll + public static void close() { + client.close(); + } + + @Test + public void testDefaultClientEnabledTokenExchange() { + 
testRestClientTokenPropagation(true, "defaultClientEnabledExchange"); + } + + @Test + public void testDefaultClientDefaultTokenExchange() { + testRestClientTokenPropagation(false, "defaultClientDefaultExchange"); + } + + @Test + public void testNamedClientDefaultTokenExchange() { + testRestClientTokenPropagation(true, "namedClientDefaultExchange"); + } + + private void testRestClientTokenPropagation(boolean exchangeEnabled, String clientKey) { + String newTokenUsername = exchangeEnabled ? "bob" : "alice"; + RestAssured.given().auth().oauth2(getBearerAccessToken()) + .queryParam("client-key", clientKey) + .when().get("/frontend/token-propagation") + .then() + .statusCode(200) + .body(equalTo("original token username: alice new token username: " + newTokenUsername)); + } + + public String getBearerAccessToken() { + return client.getAccessToken("alice", "alice"); + } + + @RegisterRestClient(baseUri = "http://localhost:8081/protected") + @AccessToken + @Path("/") + public interface DefaultClientDefaultExchange { + @GET + String getUserName(); + } + + @RegisterRestClient(baseUri = "http://localhost:8081/protected") + @AccessToken(exchangeTokenClient = "Default") + @Path("/") + public interface DefaultClientEnabledExchange { + @GET + String getUserName(); + } + + @RegisterRestClient(baseUri = "http://localhost:8081/protected") + @AccessToken(exchangeTokenClient = "named") + @Path("/") + public interface NamedClientDefaultExchange { + @GET + String getUserName(); + } + + // tests no AmbiguousResolutionException is raised + @Singleton + @Unremovable + public static class CustomAccessTokenRequestFilter extends AccessTokenRequestReactiveFilter { + } + + @Path("/frontend") + public static class MultiProviderFrontendResource { + @Inject + @RestClient + DefaultClientDefaultExchange defaultClientDefaultExchange; + + @Inject + @RestClient + DefaultClientEnabledExchange defaultClientEnabledExchange; + + @Inject + @RestClient + NamedClientDefaultExchange namedClientDefaultExchange; 
+ + @Inject + JsonWebToken jwt; + + @GET + @Path("token-propagation") + @RolesAllowed("admin") + public String userNameTokenPropagation(@QueryParam("client-key") String clientKey) { + return getResponseWithExchangedUsername(clientKey); + } + + @GET + @Path("token-propagation-with-augmentor") + @RolesAllowed("tester") // tester role is granted by SecurityIdentityAugmentor + public String userNameTokenPropagationWithSecIdentityAugmentor(@QueryParam("client-key") String clientKey) { + return getResponseWithExchangedUsername(clientKey); + } + + private String getResponseWithExchangedUsername(String clientKey) { + if ("alice".equals(jwt.getName())) { + return "original token username: " + jwt.getName() + " new token username: " + getUserName(clientKey); + } else { + throw new RuntimeException(); + } + } + + private String getUserName(String clientKey) { + return switch (clientKey) { + case "defaultClientDefaultExchange" -> defaultClientDefaultExchange.getUserName(); + case "defaultClientEnabledExchange" -> defaultClientEnabledExchange.getUserName(); + case "namedClientDefaultExchange" -> namedClientDefaultExchange.getUserName(); + default -> throw new IllegalArgumentException("Unknown client key"); + }; + } + } +} diff --git a/extensions/oidc-token-propagation-reactive/runtime/src/main/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenRequestReactiveFilter.java b/extensions/oidc-token-propagation-reactive/runtime/src/main/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenRequestReactiveFilter.java index 020ba75a00c7dc..a4eabd43ae0f80 100644 --- a/extensions/oidc-token-propagation-reactive/runtime/src/main/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenRequestReactiveFilter.java +++ b/extensions/oidc-token-propagation-reactive/runtime/src/main/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenRequestReactiveFilter.java @@ -4,19 +4,17 @@ import static 
io.quarkus.oidc.token.propagation.TokenPropagationConstants.OIDC_PROPAGATE_TOKEN_CREDENTIAL; import java.util.Collections; -import java.util.Optional; import java.util.function.Consumer; import jakarta.annotation.PostConstruct; import jakarta.annotation.Priority; import jakarta.enterprise.inject.Instance; -import jakarta.inject.Inject; +import jakarta.enterprise.inject.spi.CDI; import jakarta.ws.rs.Priorities; import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.Response; import org.eclipse.microprofile.config.ConfigProvider; -import org.eclipse.microprofile.config.inject.ConfigProperty; import org.jboss.logging.Logger; import org.jboss.resteasy.reactive.client.spi.ResteasyReactiveClientRequestContext; import org.jboss.resteasy.reactive.client.spi.ResteasyReactiveClientRequestFilter; @@ -38,16 +36,7 @@ public class AccessTokenRequestReactiveFilter implements ResteasyReactiveClientR private static final String BEARER_SCHEME_WITH_SPACE = "Bearer "; private static final String ERROR_MSG = "OIDC Token Propagation Reactive requires a safe (isolated) Vert.x sub-context because configuration property 'quarkus.oidc-token-propagation-reactive.enabled-during-authentication' has been set to true, but the current context hasn't been flagged as such."; private final boolean enabledDuringAuthentication; - - @Inject - Instance accessToken; - - @Inject - @ConfigProperty(name = "quarkus.oidc-token-propagation-reactive.client-name") - Optional oidcClientName; - @Inject - @ConfigProperty(name = "quarkus.oidc-token-propagation-reactive.exchange-token") - boolean exchangeToken; + private final Instance accessToken; OidcClient exchangeTokenClient; String exchangeTokenProperty; @@ -55,6 +44,7 @@ public class AccessTokenRequestReactiveFilter implements ResteasyReactiveClientR public AccessTokenRequestReactiveFilter() { this.enabledDuringAuthentication = Boolean.getBoolean(OIDC_PROPAGATE_TOKEN_CREDENTIAL) || Boolean.getBoolean(JWT_PROPAGATE_TOKEN_CREDENTIAL); + this.accessToken 
= CDI.current().select(TokenCredential.class); } @PostConstruct @@ -80,7 +70,8 @@ public void initExchangeTokenClient() { } protected boolean isExchangeToken() { - return exchangeToken; + return ConfigProvider.getConfig() + .getValue("quarkus.oidc-token-propagation-reactive.exchange-token", boolean.class); } @Override @@ -119,7 +110,10 @@ public void accept(Throwable t) { } protected String getClientName() { - return oidcClientName.orElse(null); + return ConfigProvider + .getConfig() + .getOptionalValue("quarkus.oidc-token-propagation-reactive.client-name", String.class) + .orElse(null); } public void propagateToken(ResteasyReactiveClientRequestContext requestContext, String accessToken) { diff --git a/extensions/oidc-token-propagation/deployment/src/main/java/io/quarkus/oidc/token/propagation/deployment/OidcTokenPropagationBuildStep.java b/extensions/oidc-token-propagation/deployment/src/main/java/io/quarkus/oidc/token/propagation/deployment/OidcTokenPropagationBuildStep.java index 9990ca1481ab84..127790bca90943 100644 --- a/extensions/oidc-token-propagation/deployment/src/main/java/io/quarkus/oidc/token/propagation/deployment/OidcTokenPropagationBuildStep.java +++ b/extensions/oidc-token-propagation/deployment/src/main/java/io/quarkus/oidc/token/propagation/deployment/OidcTokenPropagationBuildStep.java @@ -3,11 +3,14 @@ import static io.quarkus.oidc.token.propagation.TokenPropagationConstants.JWT_PROPAGATE_TOKEN_CREDENTIAL; import static io.quarkus.oidc.token.propagation.TokenPropagationConstants.OIDC_PROPAGATE_TOKEN_CREDENTIAL; +import java.util.List; import java.util.function.BooleanSupplier; import org.jboss.jandex.DotName; import io.quarkus.arc.deployment.AdditionalBeanBuildItem; +import io.quarkus.arc.deployment.GeneratedBeanBuildItem; +import io.quarkus.arc.deployment.UnremovableBeanBuildItem; import io.quarkus.deployment.Capabilities; import io.quarkus.deployment.Capability; import io.quarkus.deployment.annotations.BuildProducer; @@ -15,20 +18,21 @@ import 
io.quarkus.deployment.annotations.BuildSteps; import io.quarkus.deployment.builditem.SystemPropertyBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; -import io.quarkus.oidc.token.propagation.AccessToken; +import io.quarkus.oidc.client.deployment.AccessTokenInstanceBuildItem; +import io.quarkus.oidc.client.deployment.AccessTokenRequestFilterGenerator; import io.quarkus.oidc.token.propagation.AccessTokenRequestFilter; import io.quarkus.oidc.token.propagation.JsonWebToken; import io.quarkus.oidc.token.propagation.JsonWebTokenRequestFilter; import io.quarkus.oidc.token.propagation.runtime.OidcTokenPropagationBuildTimeConfig; import io.quarkus.oidc.token.propagation.runtime.OidcTokenPropagationConfig; import io.quarkus.restclient.deployment.RestClientAnnotationProviderBuildItem; +import io.quarkus.restclient.deployment.RestClientPredicateProviderBuildItem; import io.quarkus.resteasy.common.spi.ResteasyJaxrsProviderBuildItem; import io.quarkus.runtime.configuration.ConfigurationException; @BuildSteps(onlyIf = OidcTokenPropagationBuildStep.IsEnabled.class) public class OidcTokenPropagationBuildStep { - private static final DotName ACCESS_TOKEN_CREDENTIAL = DotName.createSimple(AccessToken.class.getName()); private static final DotName JWT_ACCESS_TOKEN_CREDENTIAL = DotName.createSimple(JsonWebToken.class.getName()); OidcTokenPropagationConfig config; @@ -37,6 +41,10 @@ public class OidcTokenPropagationBuildStep { void registerProvider(BuildProducer additionalBeans, BuildProducer reflectiveClass, BuildProducer jaxrsProviders, + BuildProducer providerPredicateProducer, + BuildProducer generatedBeanProducer, + BuildProducer unremovableBeanProducer, + List accessTokenInstances, BuildProducer restAnnotationProvider) { additionalBeans.produce(AdditionalBeanBuildItem.unremovableOf(AccessTokenRequestFilter.class)); additionalBeans.produce(AdditionalBeanBuildItem.unremovableOf(JsonWebTokenRequestFilter.class)); @@ -49,10 +57,17 @@ void 
registerProvider(BuildProducer additionalBeans, Class filterClass = config.jsonWebToken ? JsonWebTokenRequestFilter.class : AccessTokenRequestFilter.class; jaxrsProviders.produce(new ResteasyJaxrsProviderBuildItem(filterClass.getName())); } else { - restAnnotationProvider.produce(new RestClientAnnotationProviderBuildItem(ACCESS_TOKEN_CREDENTIAL, - AccessTokenRequestFilter.class)); restAnnotationProvider.produce(new RestClientAnnotationProviderBuildItem(JWT_ACCESS_TOKEN_CREDENTIAL, JsonWebTokenRequestFilter.class)); + if (!accessTokenInstances.isEmpty()) { + var filterGenerator = new AccessTokenRequestFilterGenerator(unremovableBeanProducer, reflectiveClass, + generatedBeanProducer, AccessTokenRequestFilter.class); + for (AccessTokenInstanceBuildItem instance : accessTokenInstances) { + String providerClass = filterGenerator.generateClass(instance); + providerPredicateProducer.produce(new RestClientPredicateProviderBuildItem(providerClass, + ci -> instance.targetClass().equals(ci.name().toString()))); + } + } } } diff --git a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/AccessTokenAnnotationTest.java b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/AccessTokenAnnotationTest.java new file mode 100644 index 00000000000000..bce883c7057662 --- /dev/null +++ b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/AccessTokenAnnotationTest.java @@ -0,0 +1,177 @@ +package io.quarkus.oidc.token.propagation; + +import static org.hamcrest.Matchers.equalTo; + +import jakarta.annotation.security.RolesAllowed; +import jakarta.inject.Inject; +import jakarta.inject.Singleton; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.QueryParam; + +import org.eclipse.microprofile.jwt.JsonWebToken; +import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; +import org.eclipse.microprofile.rest.client.inject.RestClient; 
+import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.Unremovable; +import io.quarkus.test.QuarkusUnitTest; +import io.quarkus.test.common.QuarkusTestResource; +import io.quarkus.test.oidc.client.OidcTestClient; +import io.quarkus.test.oidc.server.OidcWiremockTestResource; +import io.restassured.RestAssured; + +@QuarkusTestResource(OidcWiremockTestResource.class) +public class AccessTokenAnnotationTest { + + final static OidcTestClient client = new OidcTestClient(); + + @RegisterExtension + static final QuarkusUnitTest test = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addClasses(DefaultClientDefaultExchange.class, DefaultClientEnabledExchange.class, + NamedClientDefaultExchange.class, MultiProviderFrontendResource.class, ProtectedResource.class, + CustomAccessTokenRequestFilter.class) + .addAsResource( + new StringAsset( + """ + quarkus.oidc.auth-server-url=${keycloak.url}/realms/quarkus + quarkus.oidc.client-id=quarkus-app + quarkus.oidc.credentials.secret=secret + + quarkus.oidc-client.auth-server-url=${quarkus.oidc.auth-server-url} + quarkus.oidc-client.client-id=${quarkus.oidc.client-id} + quarkus.oidc-client.credentials.client-secret.value=${quarkus.oidc.credentials.secret} + quarkus.oidc-client.credentials.client-secret.method=post + quarkus.oidc-client.grant.type=jwt + quarkus.oidc-client.scopes=https://graph.microsoft.com/user.read,offline_access + quarkus.oidc-client.grant-options.jwt.requested_token_use=on_behalf_of + quarkus.oidc-client.token-path=${keycloak.url}/realms/quarkus/jwt-bearer-token + + quarkus.oidc-client.named.auth-server-url=${quarkus.oidc-client.auth-server-url} + quarkus.oidc-client.named.client-id=${quarkus.oidc-client.client-id} + quarkus.oidc-client.named.credentials.client-secret.value=${quarkus.oidc-client.credentials.client-secret.value} + 
quarkus.oidc-client.named.credentials.client-secret.method=${quarkus.oidc-client.credentials.client-secret.method} + quarkus.oidc-client.named.grant.type=${quarkus.oidc-client.grant.type} + quarkus.oidc-client.named.scopes=${quarkus.oidc-client.scopes} + quarkus.oidc-client.named.grant-options.jwt.requested_token_use=${quarkus.oidc-client.grant-options.jwt.requested_token_use} + quarkus.oidc-client.named.token-path=${quarkus.oidc-client.token-path} + """), + "application.properties")); + + @AfterAll + public static void close() { + client.close(); + } + + @Test + public void testDefaultClientEnabledTokenExchange() { + testRestClientTokenPropagation(true, "defaultClientEnabledExchange"); + } + + @Test + public void testDefaultClientDefaultTokenExchange() { + testRestClientTokenPropagation(false, "defaultClientDefaultExchange"); + } + + @Test + public void testNamedClientDefaultTokenExchange() { + testRestClientTokenPropagation(true, "namedClientDefaultExchange"); + } + + private void testRestClientTokenPropagation(boolean exchangeEnabled, String clientKey) { + String newTokenUsername = exchangeEnabled ? 
"bob" : "alice"; + RestAssured.given().auth().oauth2(getBearerAccessToken()) + .queryParam("client-key", clientKey) + .when().get("/frontend/token-propagation") + .then() + .statusCode(200) + .body(equalTo("original token username: alice new token username: " + newTokenUsername)); + } + + public String getBearerAccessToken() { + return client.getAccessToken("alice", "alice"); + } + + @RegisterRestClient(baseUri = "http://localhost:8081/protected") + @AccessToken + @Path("/") + public interface DefaultClientDefaultExchange { + @GET + String getUserName(); + } + + @RegisterRestClient(baseUri = "http://localhost:8081/protected") + @AccessToken(exchangeTokenClient = "Default") + @Path("/") + public interface DefaultClientEnabledExchange { + @GET + String getUserName(); + } + + @RegisterRestClient(baseUri = "http://localhost:8081/protected") + @AccessToken(exchangeTokenClient = "named") + @Path("/") + public interface NamedClientDefaultExchange { + @GET + String getUserName(); + } + + // tests no AmbiguousResolutionException is raised + @Singleton + @Unremovable + public static class CustomAccessTokenRequestFilter extends AccessTokenRequestFilter { + } + + @Path("/frontend") + public static class MultiProviderFrontendResource { + @Inject + @RestClient + DefaultClientDefaultExchange defaultClientDefaultExchange; + + @Inject + @RestClient + DefaultClientEnabledExchange defaultClientEnabledExchange; + + @Inject + @RestClient + NamedClientDefaultExchange namedClientDefaultExchange; + + @Inject + JsonWebToken jwt; + + @GET + @Path("token-propagation") + @RolesAllowed("admin") + public String userNameTokenPropagation(@QueryParam("client-key") String clientKey) { + return getResponseWithExchangedUsername(clientKey); + } + + @GET + @Path("token-propagation-with-augmentor") + @RolesAllowed("tester") // tester role is granted by SecurityIdentityAugmentor + public String userNameTokenPropagationWithSecIdentityAugmentor(@QueryParam("client-key") String clientKey) { + return 
getResponseWithExchangedUsername(clientKey); + } + + private String getResponseWithExchangedUsername(String clientKey) { + if ("alice".equals(jwt.getName())) { + return "original token username: " + jwt.getName() + " new token username: " + getUserName(clientKey); + } else { + throw new RuntimeException(); + } + } + + private String getUserName(String clientKey) { + return switch (clientKey) { + case "defaultClientDefaultExchange" -> defaultClientDefaultExchange.getUserName(); + case "defaultClientEnabledExchange" -> defaultClientEnabledExchange.getUserName(); + case "namedClientDefaultExchange" -> namedClientDefaultExchange.getUserName(); + default -> throw new IllegalArgumentException("Unknown client key"); + }; + } + } +} diff --git a/extensions/oidc-token-propagation/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessTokenRequestFilter.java b/extensions/oidc-token-propagation/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessTokenRequestFilter.java index f5f90f8b2d2837..9467ebcc6f12a2 100644 --- a/extensions/oidc-token-propagation/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessTokenRequestFilter.java +++ b/extensions/oidc-token-propagation/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessTokenRequestFilter.java @@ -5,15 +5,13 @@ import java.io.IOException; import java.util.Collections; -import java.util.Optional; import jakarta.annotation.PostConstruct; import jakarta.enterprise.inject.Instance; -import jakarta.inject.Inject; +import jakarta.enterprise.inject.spi.CDI; import jakarta.ws.rs.client.ClientRequestContext; import org.eclipse.microprofile.config.ConfigProvider; -import org.eclipse.microprofile.config.inject.ConfigProperty; import io.quarkus.arc.Arc; import io.quarkus.oidc.client.OidcClient; @@ -31,16 +29,7 @@ public class AccessTokenRequestFilter extends AbstractTokenRequestFilter { private static final String ERROR_MSG = "OIDC Token Propagation requires a safe (isolated) Vert.x sub-context because 
configuration property 'quarkus.oidc-token-propagation.enabled-during-authentication' has been set to true, but the current context hasn't been flagged as such."; private final boolean enabledDuringAuthentication; - - @Inject - Instance accessToken; - - @Inject - @ConfigProperty(name = "quarkus.oidc-token-propagation.client-name") - Optional oidcClientName; - @Inject - @ConfigProperty(name = "quarkus.oidc-token-propagation.exchange-token") - boolean exchangeToken; + private final Instance accessToken; OidcClient exchangeTokenClient; String exchangeTokenProperty; @@ -48,6 +37,7 @@ public class AccessTokenRequestFilter extends AbstractTokenRequestFilter { public AccessTokenRequestFilter() { this.enabledDuringAuthentication = Boolean.getBoolean(OIDC_PROPAGATE_TOKEN_CREDENTIAL) || Boolean.getBoolean(JWT_PROPAGATE_TOKEN_CREDENTIAL); + this.accessToken = CDI.current().select(TokenCredential.class); } @PostConstruct @@ -73,7 +63,7 @@ public void initExchangeTokenClient() { } protected boolean isExchangeToken() { - return exchangeToken; + return ConfigProvider.getConfig().getValue("quarkus.oidc-token-propagation.exchange-token", boolean.class); } @Override @@ -98,7 +88,8 @@ private String exchangeTokenIfNeeded(String token) { } protected String getClientName() { - return oidcClientName.orElse(null); + return ConfigProvider.getConfig().getOptionalValue("quarkus.oidc-token-propagation.client-name", String.class) + .orElse(null); } private boolean acquireTokenCredentialFromCtx(ClientRequestContext requestContext) { diff --git a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/DevServicesConfig.java b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/DevServicesConfig.java index b3ae53ab3282b5..e7971fdf3014df 100644 --- a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/DevServicesConfig.java +++ 
b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/DevServicesConfig.java @@ -33,7 +33,7 @@ public class DevServicesConfig { * ends with `-legacy`. * Override with `quarkus.keycloak.devservices.keycloak-x-image`. */ - @ConfigItem(defaultValue = "quay.io/keycloak/keycloak:23.0.4") + @ConfigItem(defaultValue = "quay.io/keycloak/keycloak:23.0.7") public String imageName; /** diff --git a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/KeycloakDevServicesProcessor.java b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/KeycloakDevServicesProcessor.java index 4cc2ff2a17d2fa..c2ef04ff3d074d 100644 --- a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/KeycloakDevServicesProcessor.java +++ b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/KeycloakDevServicesProcessor.java @@ -363,7 +363,7 @@ private RunningDevService startContainer(DockerStatusBuildItem dockerStatusBuild capturedDevServicesConfiguration.port, useSharedNetwork, capturedDevServicesConfiguration.realmPath.orElse(List.of()), - resourcesMap(), + resourcesMap(errors), capturedDevServicesConfiguration.serviceName, capturedDevServicesConfiguration.shared, capturedDevServicesConfiguration.javaOpts, @@ -401,12 +401,17 @@ private RunningDevService startContainer(DockerStatusBuildItem dockerStatusBuild .orElseGet(defaultKeycloakContainerSupplier); } - private Map resourcesMap() { + private Map resourcesMap(List errors) { Map resources = new HashMap<>(); for (Map.Entry aliasEntry : capturedDevServicesConfiguration.resourceAliases.entrySet()) { if (capturedDevServicesConfiguration.resourceMappings.containsKey(aliasEntry.getKey())) { resources.put(aliasEntry.getValue(), capturedDevServicesConfiguration.resourceMappings.get(aliasEntry.getKey())); + } else { + errors.add(String.format("%s alias for the %s resource does not 
have a mapping", aliasEntry.getKey(), + aliasEntry.getValue())); + LOG.errorf("%s alias for the %s resource does not have a mapping", aliasEntry.getKey(), + aliasEntry.getValue()); } } return resources; @@ -540,12 +545,18 @@ protected void configure() { private void mapResource(String resourcePath, String mappedResource) { if (Thread.currentThread().getContextClassLoader().getResource(resourcePath) != null) { + LOG.debugf("Mapping the classpath %s resource to %s", resourcePath, mappedResource); withClasspathResourceMapping(resourcePath, mappedResource, BindMode.READ_ONLY); } else if (Files.exists(Paths.get(resourcePath))) { + LOG.debugf("Mapping the file system %s resource to %s", resourcePath, mappedResource); withFileSystemBind(resourcePath, mappedResource, BindMode.READ_ONLY); } else { - errors.add(String.format("%s resource is not available", resourcePath)); - LOG.errorf("Realm %s resource is not available", resourcePath); + errors.add( + String.format( + "%s resource can not be mapped to %s because it is not available on the classpath and file system", + resourcePath, mappedResource)); + LOG.errorf("%s resource can not be mapped to %s because it is not available on the classpath and file system", + resourcePath, mappedResource); } } diff --git a/extensions/oidc/deployment/src/main/resources/dev-ui/qwc-oidc-provider.js b/extensions/oidc/deployment/src/main/resources/dev-ui/qwc-oidc-provider.js index fd55fda88612ba..3948bec35d168c 100644 --- a/extensions/oidc/deployment/src/main/resources/dev-ui/qwc-oidc-provider.js +++ b/extensions/oidc/deployment/src/main/resources/dev-ui/qwc-oidc-provider.js @@ -42,6 +42,7 @@ class OidcPropertiesState extends LitState { logoutUrl: null, postLogoutUriParam: null, scopes: null, + authExtraParams: null, httpPort: 0, accessToken: null, idToken: null, @@ -73,6 +74,7 @@ class OidcPropertiesState extends LitState { propertiesState.logoutUrl = response.result.logoutUrl; propertiesState.postLogoutUriParam = 
response.result.postLogoutUriParam; propertiesState.scopes = response.result.scopes; + propertiesState.authExtraParams = response.result.authExtraParams; propertiesState.httpPort = response.result.httpPort; propertiesState.oidcProviderName = response.result.oidcProviderName; propertiesState.oidcApplicationType = response.result.oidcApplicationType; @@ -857,6 +859,7 @@ export class QwcOidcProvider extends QwcHotReloadElement { _signInToOidcProviderAndGetTokens() { const clientId = this._getClientId(); const scopes = propertiesState.scopes ?? ''; + const authExtraParams = propertiesState.authExtraParams ?? ''; let address; let state; @@ -880,7 +883,8 @@ export class QwcOidcProvider extends QwcHotReloadElement { + "&redirect_uri=" + this._getEncodedPath() + "&scope=" + scopes + "&response_type=" + responseType + "&response_mode=query&prompt=login&nonce=" + QwcOidcProvider._makeId() - + "&state=" + state; + + "&state=" + state + + authExtraParams; } _getEncodedPath() { diff --git a/extensions/oidc/deployment/src/test/resources/application-dev-mode.properties b/extensions/oidc/deployment/src/test/resources/application-dev-mode.properties index 26c7ff4e26bd1f..4aaaec7a7eb440 100644 --- a/extensions/oidc/deployment/src/test/resources/application-dev-mode.properties +++ b/extensions/oidc/deployment/src/test/resources/application-dev-mode.properties @@ -11,3 +11,5 @@ quarkus.log.category."com.gargoylesoftware.htmlunit.javascript.host.css.CSSStyle quarkus.log.category."io.quarkus.oidc.runtime.TenantConfigContext".level=DEBUG quarkus.log.file.enable=true +# use blocking DNS lookup so that we have it tested somewhere +quarkus.oidc.use-blocking-dns-lookup=true \ No newline at end of file diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcConfigurationMetadata.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcConfigurationMetadata.java index aee8379f99360e..17d7998233c19e 100644 --- 
a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcConfigurationMetadata.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcConfigurationMetadata.java @@ -8,14 +8,14 @@ import io.vertx.core.json.JsonObject; public class OidcConfigurationMetadata { - private static final String ISSUER = "issuer"; - private static final String TOKEN_ENDPOINT = "token_endpoint"; - private static final String INTROSPECTION_ENDPOINT = "introspection_endpoint"; - private static final String AUTHORIZATION_ENDPOINT = "authorization_endpoint"; - private static final String JWKS_ENDPOINT = "jwks_uri"; - private static final String USERINFO_ENDPOINT = "userinfo_endpoint"; - private static final String END_SESSION_ENDPOINT = "end_session_endpoint"; - private static final String SCOPES_SUPPORTED = "scopes_supported"; + public static final String ISSUER = "issuer"; + public static final String TOKEN_ENDPOINT = "token_endpoint"; + public static final String INTROSPECTION_ENDPOINT = "introspection_endpoint"; + public static final String AUTHORIZATION_ENDPOINT = "authorization_endpoint"; + public static final String JWKS_ENDPOINT = "jwks_uri"; + public static final String USERINFO_ENDPOINT = "userinfo_endpoint"; + public static final String END_SESSION_ENDPOINT = "end_session_endpoint"; + public static final String SCOPES_SUPPORTED = "scopes_supported"; private final String discoveryUri; private final String tokenUri; diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcTenantConfig.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcTenantConfig.java index 77cbdaa69fa9f3..19419851a94747 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcTenantConfig.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcTenantConfig.java @@ -176,14 +176,32 @@ public void setIncludeClientId(boolean includeClientId) { /** * Configuration of the certificate chain which can be used to verify tokens. 
- * If the certificate chain trusstore is configured, the tokens can be verified using the certificate + * If the certificate chain truststore is configured, the tokens can be verified using the certificate * chain inlined in the Base64-encoded format as an `x5c` header in the token itself. + *

+ * The certificate chain inlined in the token is verified. + * Signature of every certificate in the chain but the root certificate is verified by the next certificate in the chain. + * Thumbprint of the root certificate in the chain must match a thumbprint of one of the certificates in the truststore. + *

+ * Additionally, a direct trust in the leaf chain certificate which will be used to verify the token signature must + * be established. + * By default, the leaf certificate's thumbprint must match a thumbprint of one of the certificates in the truststore. + * If the truststore does not have the leaf certificate imported, then the leaf certificate must be identified by its Common + * Name. */ @ConfigItem public CertificateChain certificateChain = new CertificateChain(); @ConfigGroup public static class CertificateChain { + /** + * Common name of the leaf certificate. It must be set if the {@link #trustStoreFile} does not have + * this certificate imported. + * + */ + @ConfigItem + public Optional leafCertificateName = Optional.empty(); + /** * Truststore file which keeps thumbprints of the trusted certificates. */ @@ -194,7 +212,7 @@ public static class CertificateChain { * A parameter to specify the password of the truststore file if it is configured with {@link #trustStoreFile}. */ @ConfigItem - public Optional trustStorePassword; + public Optional trustStorePassword = Optional.empty(); /** * A parameter to specify the alias of the truststore certificate. @@ -233,6 +251,14 @@ public Optional getTrustStoreFileType() { public void setTrustStoreFileType(Optional trustStoreFileType) { this.trustStoreFileType = trustStoreFileType; } + + public Optional getLeafCertificateName() { + return leafCertificateName; + } + + public void setLeafCertificateName(String leafCertificateName) { + this.leafCertificateName = Optional.of(leafCertificateName); + } } /** @@ -926,6 +952,13 @@ public enum ResponseMode { @ConfigItem public Optional> scopes = Optional.empty(); + /** + * The separator which is used when more than one scope is configured. + * A single space is used by default. + */ + @ConfigItem + public Optional scopeSeparator = Optional.empty(); + /** * Require that ID token includes a `nonce` claim which must match `nonce` authentication request query parameter. 
* Enabling this property can help mitigate replay attacks. @@ -1342,6 +1375,14 @@ public Optional getStateSecret() { public void setStateSecret(Optional stateSecret) { this.stateSecret = stateSecret; } + + public Optional getScopeSeparator() { + return scopeSeparator; + } + + public void setScopeSeparator(String scopeSeparator) { + this.scopeSeparator = Optional.of(scopeSeparator); + } } /** diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/AbstractJsonObjectResponse.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/AbstractJsonObjectResponse.java index 9dc01cc51c1569..416848f07fcf2a 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/AbstractJsonObjectResponse.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/AbstractJsonObjectResponse.java @@ -58,7 +58,7 @@ public Object get(String name) { } public boolean contains(String propertyName) { - return json.containsKey(propertyName) && !json.isNull(propertyName); + return json != null && json.containsKey(propertyName) && !json.isNull(propertyName); } public Set getPropertyNames() { diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/BearerAuthenticationMechanism.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/BearerAuthenticationMechanism.java index f6c22753ab98e0..8869e9a9bdf901 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/BearerAuthenticationMechanism.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/BearerAuthenticationMechanism.java @@ -2,6 +2,8 @@ import java.util.function.Function; +import org.jboss.logging.Logger; + import io.netty.handler.codec.http.HttpHeaderNames; import io.netty.handler.codec.http.HttpResponseStatus; import io.quarkus.oidc.AccessTokenCredential; @@ -15,14 +17,17 @@ import io.vertx.ext.web.RoutingContext; public class BearerAuthenticationMechanism extends AbstractOidcAuthenticationMechanism { + private static 
final Logger LOG = Logger.getLogger(BearerAuthenticationMechanism.class); public Uni authenticate(RoutingContext context, IdentityProviderManager identityProviderManager, OidcTenantConfig oidcTenantConfig) { + LOG.debug("Starting a bearer access token authentication"); String token = extractBearerToken(context, oidcTenantConfig); // if a bearer token is provided try to authenticate if (token != null) { return authenticate(identityProviderManager, context, new AccessTokenCredential(token)); } + LOG.debug("Bearer access token is not available"); return Uni.createFrom().nullItem(); } @@ -41,6 +46,7 @@ private String extractBearerToken(RoutingContext context, OidcTenantConfig oidcC final HttpServerRequest request = context.request(); String header = oidcConfig.token.header.isPresent() ? oidcConfig.token.header.get() : HttpHeaders.AUTHORIZATION.toString(); + LOG.debugf("Looking for a token in the %s header", header); final String headerValue = request.headers().get(header); if (headerValue == null) { @@ -50,6 +56,10 @@ private String extractBearerToken(RoutingContext context, OidcTenantConfig oidcC int idx = headerValue.indexOf(' '); final String scheme = idx > 0 ? 
headerValue.substring(0, idx) : null; + if (scheme != null) { + LOG.debugf("Authorization scheme: %s", scheme); + } + if (scheme == null && !header.equalsIgnoreCase(HttpHeaders.AUTHORIZATION.toString())) { return headerValue; } diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/CertChainPublicKeyResolver.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/CertChainPublicKeyResolver.java index ae0105fce3bcf3..069ad2efb7704d 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/CertChainPublicKeyResolver.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/CertChainPublicKeyResolver.java @@ -3,6 +3,7 @@ import java.security.Key; import java.security.cert.X509Certificate; import java.util.List; +import java.util.Optional; import java.util.Set; import org.jboss.logging.Logger; @@ -12,11 +13,13 @@ import io.quarkus.oidc.OidcTenantConfig.CertificateChain; import io.quarkus.runtime.configuration.ConfigurationException; +import io.quarkus.security.runtime.X509IdentityProvider; import io.vertx.ext.auth.impl.CertificateHelper; public class CertChainPublicKeyResolver implements RefreshableVerificationKeyResolver { private static final Logger LOG = Logger.getLogger(OidcProvider.class); final Set thumbprints; + final Optional expectedLeafCertificateName; public CertChainPublicKeyResolver(CertificateChain chain) { if (chain.trustStorePassword.isEmpty()) { @@ -25,6 +28,7 @@ public CertChainPublicKeyResolver(CertificateChain chain) { } this.thumbprints = TrustStoreUtils.getTrustedCertificateThumbprints(chain.trustStoreFile.get(), chain.trustStorePassword.get(), chain.trustStoreCertAlias, chain.getTrustStoreFileType()); + this.expectedLeafCertificateName = chain.leafCertificateName; } @Override @@ -37,9 +41,29 @@ public Key resolveKey(JsonWebSignature jws, List nestingContex LOG.debug("Token does not have an 'x5c' certificate chain header"); return null; } - String thumbprint = 
TrustStoreUtils.calculateThumprint(chain.get(0)); - if (!thumbprints.contains(thumbprint)) { - throw new UnresolvableKeyException("Certificate chain thumprint is invalid"); + if (chain.size() == 0) { + LOG.debug("Token 'x5c' certificate chain is empty"); + return null; + } + LOG.debug("Checking a thumbprint of the root chain certificate"); + String rootThumbprint = TrustStoreUtils.calculateThumprint(chain.get(chain.size() - 1)); + if (!thumbprints.contains(rootThumbprint)) { + LOG.error("Thumprint of the root chain certificate is invalid"); + throw new UnresolvableKeyException("Thumprint of the root chain certificate is invalid"); + } + if (expectedLeafCertificateName.isEmpty()) { + LOG.debug("Checking a thumbprint of the leaf chain certificate"); + String thumbprint = TrustStoreUtils.calculateThumprint(chain.get(0)); + if (!thumbprints.contains(thumbprint)) { + LOG.error("Thumprint of the leaf chain certificate is invalid"); + throw new UnresolvableKeyException("Thumprint of the leaf chain certificate is invalid"); + } + } else { + String leafCertificateName = X509IdentityProvider.getCommonName(chain.get(0).getSubjectX500Principal()); + if (!expectedLeafCertificateName.get().equals(leafCertificateName)) { + LOG.errorf("Wrong leaf certificate common name: %s", leafCertificateName); + throw new UnresolvableKeyException("Wrong leaf certificate common name"); + } } //TODO: support revocation lists CertificateHelper.checkValidity(chain, null); @@ -50,6 +74,8 @@ public Key resolveKey(JsonWebSignature jws, List nestingContex root.verify(root.getPublicKey()); } return chain.get(0).getPublicKey(); + } catch (UnresolvableKeyException ex) { + throw ex; } catch (Exception ex) { throw new UnresolvableKeyException("Invalid certificate chain", ex); } diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/DynamicVerificationKeyResolver.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/DynamicVerificationKeyResolver.java index 
3844bd400cbfd2..dbb2adeb2af491 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/DynamicVerificationKeyResolver.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/DynamicVerificationKeyResolver.java @@ -1,8 +1,10 @@ package io.quarkus.oidc.runtime; import java.security.Key; +import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Function; import jakarta.enterprise.event.Observes; @@ -24,6 +26,9 @@ public class DynamicVerificationKeyResolver { private static final Logger LOG = Logger.getLogger(DynamicVerificationKeyResolver.class); + private static final Set KEY_HEADERS = Set.of(HeaderParameterNames.KEY_ID, + HeaderParameterNames.X509_CERTIFICATE_SHA256_THUMBPRINT, + HeaderParameterNames.X509_CERTIFICATE_THUMBPRINT); private final OidcProviderClient client; private final MemoryCache cache; @@ -46,6 +51,12 @@ public Uni resolve(TokenCredential tokenCred) { if (key != null) { return Uni.createFrom().item(new SingleKeyVerificationKeyResolver(key)); } + if (chainResolverFallback != null && headers.containsKey(HeaderParameterNames.X509_CERTIFICATE_CHAIN) + && Collections.disjoint(KEY_HEADERS, headers.fieldNames())) { + // If none of the key headers is available which can be used to resolve JWK then do + // not try to get another JWK set but delegate to the chain resolver fallback if it is available + return getChainResolver(); + } return client.getJsonWebKeySet(new OidcRequestContextProperties( Map.of(OidcRequestContextProperties.TOKEN, tokenCred.getToken(), @@ -105,9 +116,7 @@ public Uni apply(JsonWebKeySet jwks) { } if (newKey == null && chainResolverFallback != null) { - LOG.debug("JWK is not available, neither 'kid' nor 'x5t#S256' nor 'x5t' token headers are set," - + " falling back to the certificate chain resolver"); - return Uni.createFrom().item(chainResolverFallback); + return getChainResolver(); } if (newKey == null) { @@ -121,6 +130,12 @@ public 
Uni apply(JsonWebKeySet jwks) { }); } + private Uni getChainResolver() { + LOG.debug("JWK is not available, neither 'kid' nor 'x5t#S256' nor 'x5t' token headers are set," + + " falling back to the certificate chain resolver"); + return Uni.createFrom().item(chainResolverFallback); + } + private static Key getKeyWithId(JsonWebKeySet jwks, String kid) { if (kid != null) { return jwks.getKeyWithId(kid); diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcAuthenticationMechanism.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcAuthenticationMechanism.java index bd5c8ab18aeea1..b17902078794c3 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcAuthenticationMechanism.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcAuthenticationMechanism.java @@ -6,6 +6,8 @@ import jakarta.enterprise.context.ApplicationScoped; +import org.jboss.logging.Logger; + import io.quarkus.oidc.OIDCException; import io.quarkus.oidc.OidcTenantConfig; import io.quarkus.oidc.OidcTenantConfig.ApplicationType; @@ -23,6 +25,8 @@ @ApplicationScoped public class OidcAuthenticationMechanism implements HttpAuthenticationMechanism { + private static final Logger LOG = Logger.getLogger(OidcAuthenticationMechanism.class); + private static HttpCredentialTransport OIDC_WEB_APP_TRANSPORT = new HttpCredentialTransport( HttpCredentialTransport.Type.AUTHORIZATION_CODE, OidcConstants.CODE_FLOW_CODE); @@ -75,6 +79,7 @@ public OidcTenantConfig apply(OidcTenantConfig oidcTenantConfig) { if (oidcTenantConfig == null) { throw new OIDCException("Tenant configuration has not been resolved"); } + LOG.debugf("Resolved OIDC tenant id: %s", oidcTenantConfig.tenantId.orElse(OidcUtils.DEFAULT_TENANT_ID)); return oidcTenantConfig; }; }); diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcConfigPropertySupplier.java 
b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcConfigPropertySupplier.java index 68b586664872f6..9a6ddce2b1983b 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcConfigPropertySupplier.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcConfigPropertySupplier.java @@ -1,6 +1,7 @@ package io.quarkus.oidc.runtime; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.function.Supplier; @@ -13,6 +14,7 @@ import io.quarkus.oidc.common.runtime.OidcCommonUtils; import io.quarkus.oidc.common.runtime.OidcConstants; import io.quarkus.oidc.runtime.providers.KnownOidcProviders; +import io.smallrye.config.SmallRyeConfig; public class OidcConfigPropertySupplier implements Supplier { private static final String AUTH_SERVER_URL_CONFIG_KEY = "quarkus.oidc.auth-server-url"; @@ -23,6 +25,7 @@ public class OidcConfigPropertySupplier implements Supplier { TOKEN_PATH_CONFIG_KEY, AUTH_PATH_CONFIG_KEY); private static final String OIDC_PROVIDER_CONFIG_KEY = "quarkus.oidc.provider"; private static final String SCOPES_KEY = "quarkus.oidc.authentication.scopes"; + private static final String AUTH_EXTRA_PARAMS_KEY = "quarkus.oidc.authentication.extra-params"; private String oidcConfigProperty; private String defaultValue; private boolean urlProperty; @@ -119,6 +122,22 @@ public String get(Config config) { } else { return OidcConstants.OPENID_SCOPE; } + } else if (AUTH_EXTRA_PARAMS_KEY.equals(oidcConfigProperty)) { + StringBuilder sb = new StringBuilder(); + if (config instanceof SmallRyeConfig) { + Optional> extraParams = ((SmallRyeConfig) config).getOptionalValues(oidcConfigProperty, + String.class, + String.class); + if (extraParams.isPresent()) { + for (Map.Entry entry : extraParams.get().entrySet()) { + if (entry.getKey().equals(OidcConstants.TOKEN_SCOPE)) { + continue; + } + 
sb.append("&").append(entry.getKey()).append("=").append(OidcCommonUtils.urlEncode(entry.getValue())); + } + } + } + return sb.toString(); } else { return checkUrlProperty(config.getOptionalValue(oidcConfigProperty, String.class), providerConfig, config); diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcIdentityProvider.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcIdentityProvider.java index 711baa25e5435e..6c93dccc5510e3 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcIdentityProvider.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcIdentityProvider.java @@ -18,6 +18,7 @@ import io.quarkus.oidc.AccessTokenCredential; import io.quarkus.oidc.IdTokenCredential; +import io.quarkus.oidc.OIDCException; import io.quarkus.oidc.OidcTenantConfig; import io.quarkus.oidc.OidcTenantConfig.Roles.Source; import io.quarkus.oidc.TokenIntrospection; @@ -46,7 +47,6 @@ public class OidcIdentityProvider implements IdentityProvider NULL_CODE_ACCESS_TOKEN_UNI = Uni.createFrom().nullItem(); - private static final String CODE_ACCESS_TOKEN_RESULT = "code_flow_access_token_result"; protected final DefaultTenantConfigResolver tenantResolver; private final BlockingTaskRunner uniVoidOidcContext; @@ -99,14 +99,16 @@ protected Map getRequestData(TokenAuthenticationRequest request) private Uni authenticate(TokenAuthenticationRequest request, Map requestData, TenantConfigContext resolvedContext) { - if (resolvedContext.oidcConfig.publicKey.isPresent()) { - LOG.debug("Performing token verification with a configured public key"); - return validateTokenWithoutOidcServer(request, resolvedContext); + if (resolvedContext.oidcConfig.authServerUrl.isPresent()) { + return validateAllTokensWithOidcServer(requestData, request, resolvedContext); } else if (resolvedContext.oidcConfig.getCertificateChain().trustStoreFile.isPresent()) { LOG.debug("Performing token verification with a public key 
inlined in the certificate chain"); return validateTokenWithoutOidcServer(request, resolvedContext); + } else if (resolvedContext.oidcConfig.publicKey.isPresent()) { + LOG.debug("Performing token verification with a configured public key"); + return validateTokenWithoutOidcServer(request, resolvedContext); } else { - return validateAllTokensWithOidcServer(requestData, request, resolvedContext); + return Uni.createFrom().failure(new OIDCException("Unexpected authentication request")); } } @@ -149,30 +151,7 @@ public Uni apply(UserInfo userInfo, Throwable t) { isIdToken(request), null); } - // Verify Code Flow access token first if it is available and has to be verified. - // It may be refreshed if it has or has nearly expired - Uni codeAccessTokenUni = verifyCodeFlowAccessTokenUni(requestData, request, - resolvedContext, - null); - return codeAccessTokenUni.onItemOrFailure().transformToUni( - new BiFunction>() { - @Override - public Uni apply(TokenVerificationResult codeAccessTokenResult, Throwable t) { - if (t != null) { - return Uni.createFrom().failure(t instanceof AuthenticationFailedException ? 
t - : new AuthenticationFailedException(t)); - } - if (codeAccessTokenResult != null) { - if (tokenAutoRefreshPrepared(codeAccessTokenResult, requestData, - resolvedContext.oidcConfig)) { - return Uni.createFrom().failure(new TokenAutoRefreshException(null)); - } - requestData.put(CODE_ACCESS_TOKEN_RESULT, codeAccessTokenResult); - } - return getUserInfoAndCreateIdentity(primaryTokenUni, requestData, request, resolvedContext); - } - }); - + return getUserInfoAndCreateIdentity(primaryTokenUni, requestData, request, resolvedContext); } } @@ -191,7 +170,7 @@ public Uni apply(TokenVerificationResult codeAccessToken, Thro } if (codeAccessToken != null) { - requestData.put(CODE_ACCESS_TOKEN_RESULT, codeAccessToken); + requestData.put(OidcUtils.CODE_ACCESS_TOKEN_RESULT, codeAccessToken); } Uni tokenUni = verifyTokenUni(requestData, resolvedContext, @@ -217,7 +196,8 @@ public Uni apply(TokenVerificationResult result, Throwable t) } private Uni getUserInfoAndCreateIdentity(Uni tokenUni, - Map requestData, TokenAuthenticationRequest request, + Map requestData, + TokenAuthenticationRequest request, TenantConfigContext resolvedContext) { return tokenUni.onItemOrFailure() @@ -227,21 +207,49 @@ public Uni apply(TokenVerificationResult result, Throwable t) if (t != null) { return Uni.createFrom().failure(new AuthenticationFailedException(t)); } - if (resolvedContext.oidcConfig.authentication.isUserInfoRequired().orElse(false)) { - return getUserInfoUni(requestData, request, resolvedContext).onItemOrFailure().transformToUni( - new BiFunction>() { - @Override - public Uni apply(UserInfo userInfo, Throwable t) { - if (t != null) { - return Uni.createFrom().failure(new AuthenticationFailedException(t)); + + Uni codeAccessTokenUni = verifyCodeFlowAccessTokenUni(requestData, request, + resolvedContext, + null); + return codeAccessTokenUni.onItemOrFailure().transformToUni( + new BiFunction>() { + @Override + public Uni apply(TokenVerificationResult codeAccessTokenResult, + Throwable t) 
{ + if (t != null) { + return Uni.createFrom().failure(t instanceof AuthenticationFailedException ? t + : new AuthenticationFailedException(t)); + } + if (codeAccessTokenResult != null) { + if (tokenAutoRefreshPrepared(codeAccessTokenResult, requestData, + resolvedContext.oidcConfig)) { + return Uni.createFrom().failure(new TokenAutoRefreshException(null)); } + requestData.put(OidcUtils.CODE_ACCESS_TOKEN_RESULT, codeAccessTokenResult); + } + + if (resolvedContext.oidcConfig.authentication.isUserInfoRequired().orElse(false)) { + return getUserInfoUni(requestData, request, resolvedContext).onItemOrFailure() + .transformToUni( + new BiFunction>() { + @Override + public Uni apply(UserInfo userInfo, + Throwable t) { + if (t != null) { + return Uni.createFrom() + .failure(new AuthenticationFailedException(t)); + } + return createSecurityIdentityWithOidcServer(result, + requestData, request, + resolvedContext, userInfo); + } + }); + } else { return createSecurityIdentityWithOidcServer(result, requestData, request, - resolvedContext, userInfo); + resolvedContext, null); } - }); - } else { - return createSecurityIdentityWithOidcServer(result, requestData, request, resolvedContext, null); - } + } + }); } }); @@ -405,7 +413,8 @@ private static JsonObject getRolesJson(Map requestData, TenantCo rolesJson = new JsonObject(userInfo.getJsonObject().toString()); } else if (tokenCred instanceof IdTokenCredential && resolvedContext.oidcConfig.roles.source.get() == Source.accesstoken) { - rolesJson = ((TokenVerificationResult) requestData.get(CODE_ACCESS_TOKEN_RESULT)).localVerificationResult; + rolesJson = ((TokenVerificationResult) requestData + .get(OidcUtils.CODE_ACCESS_TOKEN_RESULT)).localVerificationResult; if (rolesJson == null) { // JSON token representation may be null not only if it is an opaque access token // but also if it is JWT and no JWK with a matching kid is available, asynchronous diff --git 
a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProvider.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProvider.java index 77f389b5c4baed..ecd207f38ded8b 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProvider.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProvider.java @@ -87,8 +87,15 @@ public OidcProvider(OidcProviderClient client, OidcTenantConfig oidcConfig, Json this.client = client; this.oidcConfig = oidcConfig; this.tokenCustomizer = tokenCustomizer; - this.asymmetricKeyResolver = jwks == null ? null - : new JsonWebKeyResolver(jwks, oidcConfig.token.forcedJwkRefreshInterval, oidcConfig.certificateChain); + if (jwks != null) { + this.asymmetricKeyResolver = new JsonWebKeyResolver(jwks, oidcConfig.token.forcedJwkRefreshInterval, + oidcConfig.certificateChain); + } else if (oidcConfig != null && oidcConfig.certificateChain.trustStoreFile.isPresent()) { + this.asymmetricKeyResolver = new CertChainPublicKeyResolver(oidcConfig.certificateChain); + } else { + this.asymmetricKeyResolver = null; + } + if (client != null && oidcConfig != null && !oidcConfig.jwks.resolveEarly) { this.keyResolverProvider = new DynamicVerificationKeyResolver(client, oidcConfig); } else { diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProviderClient.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProviderClient.java index d80d02f7c5474e..3090c67ade9f29 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProviderClient.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProviderClient.java @@ -22,7 +22,6 @@ import io.quarkus.oidc.common.runtime.OidcCommonConfig.Credentials.Secret.Method; import io.quarkus.oidc.common.runtime.OidcCommonUtils; import io.quarkus.oidc.common.runtime.OidcConstants; -import io.quarkus.oidc.common.runtime.OidcEndpointAccessException; import 
io.smallrye.mutiny.Uni; import io.smallrye.mutiny.groups.UniOnItem; import io.vertx.core.Vertx; @@ -85,16 +84,22 @@ public OidcConfigurationMetadata getMetadata() { } public Uni getJsonWebKeySet(OidcRequestContextProperties contextProperties) { - return filter(OidcEndpoint.Type.JWKS, client.getAbs(metadata.getJsonWebKeySetUri()), null, contextProperties).send() + return OidcCommonUtils + .sendRequest(vertx, + filter(OidcEndpoint.Type.JWKS, client.getAbs(metadata.getJsonWebKeySetUri()), null, contextProperties), + oidcConfig.useBlockingDnsLookup) .onItem() .transform(resp -> getJsonWebKeySet(resp)); } public Uni getUserInfo(String token) { LOG.debugf("Get UserInfo on: %s auth: %s", metadata.getUserInfoUri(), OidcConstants.BEARER_SCHEME + " " + token); - return filter(OidcEndpoint.Type.USERINFO, client.getAbs(metadata.getUserInfoUri()), null, null) - .putHeader(AUTHORIZATION_HEADER, OidcConstants.BEARER_SCHEME + " " + token) - .send().onItem().transform(resp -> getUserInfo(resp)); + return OidcCommonUtils + .sendRequest(vertx, + filter(OidcEndpoint.Type.USERINFO, client.getAbs(metadata.getUserInfoUri()), null, null) + .putHeader(AUTHORIZATION_HEADER, OidcConstants.BEARER_SCHEME + " " + token), + oidcConfig.useBlockingDnsLookup) + .onItem().transform(resp -> getUserInfo(resp)); } public Uni introspectToken(String token) { @@ -109,7 +114,7 @@ private JsonWebKeySet getJsonWebKeySet(HttpResponse resp) { if (resp.statusCode() == 200) { return new JsonWebKeySet(resp.bodyAsString(StandardCharsets.UTF_8.name())); } else { - throw new OidcEndpointAccessException(resp.statusCode()); + throw responseException(metadata.getJsonWebKeySetUri(), resp); } } @@ -201,7 +206,7 @@ private UniOnItem> getHttpResponse(String uri, MultiMap for } private AuthorizationCodeTokens getAuthorizationCodeTokens(HttpResponse resp) { - JsonObject json = getJsonObject(resp); + JsonObject json = getJsonObject(metadata.getAuthorizationUri(), resp); final String idToken = 
json.getString(OidcConstants.ID_TOKEN_VALUE); final String accessToken = json.getString(OidcConstants.ACCESS_TOKEN_VALUE); final String refreshToken = json.getString(OidcConstants.REFRESH_TOKEN_VALUE); @@ -209,35 +214,41 @@ private AuthorizationCodeTokens getAuthorizationCodeTokens(HttpResponse } private UserInfo getUserInfo(HttpResponse resp) { - return new UserInfo(getString(resp)); + return new UserInfo(getString(metadata.getUserInfoUri(), resp)); } private TokenIntrospection getTokenIntrospection(HttpResponse resp) { - return new TokenIntrospection(getString(resp)); + return new TokenIntrospection(getString(metadata.getIntrospectionUri(), resp)); } - private static JsonObject getJsonObject(HttpResponse resp) { + private static JsonObject getJsonObject(String requestUri, HttpResponse resp) { if (resp.statusCode() == 200) { LOG.debugf("Request succeeded: %s", resp.bodyAsJsonObject()); return resp.bodyAsJsonObject(); } else { - throw responseException(resp); + throw responseException(requestUri, resp); } } - private static String getString(HttpResponse resp) { + private static String getString(String requestUri, HttpResponse resp) { if (resp.statusCode() == 200) { LOG.debugf("Request succeeded: %s", resp.bodyAsString()); return resp.bodyAsString(); } else { - throw responseException(resp); + throw responseException(requestUri, resp); } } - private static OIDCException responseException(HttpResponse resp) { + private static OIDCException responseException(String requestUri, HttpResponse resp) { String errorMessage = resp.bodyAsString(); - LOG.debugf("Request has failed: status: %d, error message: %s", resp.statusCode(), errorMessage); - throw new OIDCException(errorMessage); + + if (errorMessage != null && !errorMessage.isEmpty()) { + LOG.errorf("Request %s has failed: status: %d, error message: %s", requestUri, resp.statusCode(), errorMessage); + throw new OIDCException(errorMessage); + } else { + LOG.errorf("Request %s has failed: status: %d", requestUri, 
resp.statusCode()); + throw new OIDCException("Error status:" + resp.statusCode()); + } } @Override diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcRecorder.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcRecorder.java index 376419f0758bd8..71fc6236001919 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcRecorder.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcRecorder.java @@ -470,8 +470,8 @@ protected static Uni createOidcClientUni(OidcTenantConfig oi WebClientOptions options = new WebClientOptions(); OidcCommonUtils.setHttpClientOptions(oidcConfig, tlsConfig, options); - - WebClient client = WebClient.create(new io.vertx.mutiny.core.Vertx(vertx), options); + var mutinyVertx = new io.vertx.mutiny.core.Vertx(vertx); + WebClient client = WebClient.create(mutinyVertx, options); Map> oidcRequestFilters = OidcCommonUtils.getOidcRequestFilters(); @@ -481,7 +481,8 @@ protected static Uni createOidcClientUni(OidcTenantConfig oi } else { final long connectionDelayInMillisecs = OidcCommonUtils.getConnectionDelayInMillis(oidcConfig); metadataUni = OidcCommonUtils - .discoverMetadata(client, oidcRequestFilters, authServerUriString, connectionDelayInMillisecs) + .discoverMetadata(client, oidcRequestFilters, authServerUriString, connectionDelayInMillisecs, mutinyVertx, + oidcConfig.useBlockingDnsLookup) .onItem() .transform(new Function() { @Override diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcTokenCredentialProducer.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcTokenCredentialProducer.java index 2c5513e10aed8b..fff629c7bffb56 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcTokenCredentialProducer.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcTokenCredentialProducer.java @@ -9,6 +9,7 @@ import org.jboss.logging.Logger; import 
io.quarkus.oidc.AccessTokenCredential; +import io.quarkus.oidc.IdToken; import io.quarkus.oidc.IdTokenCredential; import io.quarkus.oidc.RefreshToken; import io.quarkus.oidc.TokenIntrospection; @@ -78,13 +79,43 @@ UserInfo currentUserInfo() { } /** - * The producer method for the current UserInfo + * The producer method for the ID token TokenIntrospection only. * - * @return the user info + * @return the ID token introspection */ @Produces @RequestScoped - TokenIntrospection currentTokenIntrospection() { + @IdToken + TokenIntrospection idTokenIntrospection() { + return tokenIntrospectionFromIdentityAttribute(); + } + + /** + * The producer method for the current TokenIntrospection. + *

+ * This TokenIntrospection always represents the bearer access token introspection when the bearer access tokens + * are used. + *

+ * In case of the authorization code flow, it represents a code flow access token introspection + * if it has been enabled by setting the `quarkus.oidc.authentication.verify-access-token` property to `true` + * and an ID token introspection otherwise. Use the `@IdToken` qualifier if both ID and code flow access tokens + * must be introspected. + * + * @return the token introspection + */ + @Produces + @RequestScoped + TokenIntrospection tokenIntrospection() { + TokenVerificationResult codeFlowAccessTokenResult = (TokenVerificationResult) identity + .getAttribute(OidcUtils.CODE_ACCESS_TOKEN_RESULT); + if (codeFlowAccessTokenResult == null) { + return tokenIntrospectionFromIdentityAttribute(); + } else { + return codeFlowAccessTokenResult.introspectionResult; + } + } + + TokenIntrospection tokenIntrospectionFromIdentityAttribute() { TokenIntrospection introspection = (TokenIntrospection) identity.getAttribute(OidcUtils.INTROSPECTION_ATTRIBUTE); if (introspection == null) { LOG.trace("TokenIntrospection is null"); @@ -92,4 +123,5 @@ TokenIntrospection currentTokenIntrospection() { } return introspection; } + } diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcUtils.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcUtils.java index 763a991177bfc7..38b4c6543fe629 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcUtils.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcUtils.java @@ -82,7 +82,9 @@ public final class OidcUtils { public static final String STATE_COOKIE_NAME = "q_auth"; public static final Integer MAX_COOKIE_VALUE_LENGTH = 4096; public static final String POST_LOGOUT_COOKIE_NAME = "q_post_logout"; + public static final String DEFAULT_SCOPE_SEPARATOR = " "; static final String UNDERSCORE = "_"; + static final String CODE_ACCESS_TOKEN_RESULT = "code_flow_access_token_result"; static final String COMMA = ","; static final Uni VOID_UNI = 
Uni.createFrom().voidItem(); static final BlockingTaskRunner deleteTokensRequestContext = new BlockingTaskRunner(); @@ -350,6 +352,10 @@ static QuarkusSecurityIdentity validateAndCreateIdentity(Map req setSecurityIdentityConfigMetadata(builder, resolvedContext); setBlockingApiAttribute(builder, vertxContext); setTenantIdAttribute(builder, config); + TokenVerificationResult codeFlowAccessTokenResult = (TokenVerificationResult) requestData.get(CODE_ACCESS_TOKEN_RESULT); + if (codeFlowAccessTokenResult != null) { + builder.addAttribute(CODE_ACCESS_TOKEN_RESULT, codeFlowAccessTokenResult); + } return builder.build(); } @@ -547,6 +553,9 @@ static OidcTenantConfig mergeTenantConfig(OidcTenantConfig tenant, OidcTenantCon if (tenant.authentication.scopes.isEmpty()) { tenant.authentication.scopes = provider.authentication.scopes; } + if (tenant.authentication.scopeSeparator.isEmpty()) { + tenant.authentication.scopeSeparator = provider.authentication.scopeSeparator; + } if (tenant.authentication.addOpenidScope.isEmpty()) { tenant.authentication.addOpenidScope = provider.authentication.addOpenidScope; } @@ -656,7 +665,8 @@ public void handle(Void event) { } public static String encodeScopes(OidcTenantConfig oidcConfig) { - return OidcCommonUtils.urlEncode(String.join(" ", getAllScopes(oidcConfig))); + return OidcCommonUtils.urlEncode(String.join(oidcConfig.authentication.scopeSeparator.orElse(DEFAULT_SCOPE_SEPARATOR), + getAllScopes(oidcConfig))); } public static List getAllScopes(OidcTenantConfig oidcConfig) { diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevUiRuntimePropertiesDTO.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevUiRuntimePropertiesDTO.java index 743e0b86426b67..5695a5bd53b15c 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevUiRuntimePropertiesDTO.java +++ 
b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevUiRuntimePropertiesDTO.java @@ -15,6 +15,7 @@ public class OidcDevUiRuntimePropertiesDTO { private static final String END_SESSION_PATH_CONFIG_KEY = CONFIG_PREFIX + "end-session-path"; private static final String POST_LOGOUT_URI_PARAM_CONFIG_KEY = CONFIG_PREFIX + "logout.post-logout-uri-param"; private static final String SCOPES_KEY = CONFIG_PREFIX + "authentication.scopes"; + private static final String AUTH_EXTRA_PARAMS_KEY = CONFIG_PREFIX + "authentication.extra-params"; private final String clientId; private final String clientSecret; private final String authorizationUrl; @@ -22,6 +23,7 @@ public class OidcDevUiRuntimePropertiesDTO { private final String logoutUrl; private final String postLogoutUriParam; private final String scopes; + private final String authExtraParams; private final int httpPort; private final String oidcProviderName; private final String oidcApplicationType; @@ -49,6 +51,7 @@ public class OidcDevUiRuntimePropertiesDTO { this.logoutUrl = new OidcConfigPropertySupplier(END_SESSION_PATH_CONFIG_KEY, logoutUrl, true).get(config); this.postLogoutUriParam = new OidcConfigPropertySupplier(POST_LOGOUT_URI_PARAM_CONFIG_KEY).get(config); this.scopes = new OidcConfigPropertySupplier(SCOPES_KEY).get(config); + this.authExtraParams = new OidcConfigPropertySupplier(AUTH_EXTRA_PARAMS_KEY).get(config); this.httpPort = httpPort; this.oidcProviderName = oidcProviderName; this.oidcApplicationType = oidcApplicationType; @@ -92,6 +95,10 @@ public String getScopes() { return scopes; } + public String getAuthExtraParams() { + return authExtraParams; + } + public int getHttpPort() { return httpPort; } diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/providers/AzureAccessTokenCustomizer.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/providers/AzureAccessTokenCustomizer.java index 5d764fe560fff1..edad83046e7be5 100644 --- 
a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/providers/AzureAccessTokenCustomizer.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/providers/AzureAccessTokenCustomizer.java @@ -10,22 +10,22 @@ import io.quarkus.oidc.OIDCException; import io.quarkus.oidc.TokenCustomizer; +import io.quarkus.oidc.common.runtime.OidcConstants; import io.quarkus.oidc.runtime.OidcUtils; @Named("azure-access-token-customizer") @ApplicationScoped public class AzureAccessTokenCustomizer implements TokenCustomizer { - private static final String NONCE = "nonce"; @Override public JsonObject customizeHeaders(JsonObject headers) { try { - String nonce = headers.getString(NONCE); + String nonce = headers.containsKey(OidcConstants.NONCE) ? headers.getString(OidcConstants.NONCE) : null; if (nonce != null) { byte[] nonceSha256 = OidcUtils.getSha256Digest(nonce.getBytes(StandardCharsets.UTF_8)); byte[] newNonceBytes = Base64.getUrlEncoder().withoutPadding().encode(nonceSha256); return Json.createObjectBuilder(headers) - .add(NONCE, new String(newNonceBytes, StandardCharsets.UTF_8)).build(); + .add(OidcConstants.NONCE, new String(newNonceBytes, StandardCharsets.UTF_8)).build(); } return null; } catch (Exception ex) { diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/providers/KnownOidcProviders.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/providers/KnownOidcProviders.java index d59f5fec66fb49..36bc6425e2e49c 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/providers/KnownOidcProviders.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/providers/KnownOidcProviders.java @@ -172,6 +172,7 @@ private static OidcTenantConfig strava() { ret.getToken().setVerifyAccessTokenWithUserInfo(true); ret.getCredentials().getClientSecret().setMethod(Method.QUERY); + ret.getAuthentication().setScopeSeparator(","); return ret; } diff --git 
a/extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/KnownOidcProvidersTest.java b/extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/KnownOidcProvidersTest.java index 1bafcd14e7b914..715fe4a563fe6b 100644 --- a/extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/KnownOidcProvidersTest.java +++ b/extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/KnownOidcProvidersTest.java @@ -455,6 +455,7 @@ public void testAcceptStravaProperties() { assertFalse(config.getAuthentication().idTokenRequired.get()); assertEquals(Method.QUERY, config.credentials.clientSecret.method.get()); assertEquals("/strava", config.authentication.redirectPath.get()); + assertEquals(",", config.authentication.scopeSeparator.get()); } @Test @@ -472,6 +473,7 @@ public void testOverrideStravaProperties() { tenant.token.setVerifyAccessTokenWithUserInfo(false); tenant.credentials.clientSecret.setMethod(Method.BASIC); tenant.authentication.setRedirectPath("/fitness-app"); + tenant.authentication.setScopeSeparator(" "); OidcTenantConfig config = OidcUtils.mergeTenantConfig(tenant, KnownOidcProviders.provider(Provider.STRAVA)); @@ -485,6 +487,7 @@ public void testOverrideStravaProperties() { assertFalse(config.token.verifyAccessTokenWithUserInfo.get()); assertEquals(Method.BASIC, config.credentials.clientSecret.method.get()); assertEquals("/fitness-app", config.authentication.redirectPath.get()); + assertEquals(" ", config.authentication.scopeSeparator.get()); } @Test diff --git a/extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/OidcUtilsTest.java b/extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/OidcUtilsTest.java index 40b139e0b95224..68ee4e1c65a2c6 100644 --- a/extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/OidcUtilsTest.java +++ b/extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/OidcUtilsTest.java @@ -301,6 +301,15 @@ public void testEncodeAllScopes() throws Exception { 
assertEquals("openid+a%3A1+b%3A2+c+d", OidcUtils.encodeScopes(config)); } + @Test + public void testEncodeAllScopesWithCustomSeparator() throws Exception { + OidcTenantConfig config = new OidcTenantConfig(); + config.authentication.setScopeSeparator(","); + config.authentication.setScopes(List.of("a:1", "b:2")); + config.authentication.setExtraParams(Map.of("scope", "c,d")); + assertEquals("openid%2Ca%3A1%2Cb%3A2%2Cc%2Cd", OidcUtils.encodeScopes(config)); + } + public static JsonObject read(InputStream input) throws IOException { try (BufferedReader buffer = new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8))) { return new JsonObject(buffer.lines().collect(Collectors.joining("\n"))); diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/QuarkusContextStorage.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/QuarkusContextStorage.java index ebba2e69aee641..fb36955494bb31 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/QuarkusContextStorage.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/QuarkusContextStorage.java @@ -75,7 +75,7 @@ public Scope attach(io.vertx.core.Context vertxContext, Context toAttach) { public void close() { final Context before = getContext(vertxContext); if (before != toAttach) { - log.warn("Context in storage not the expected context, Scope.close was not called correctly. Details:" + + log.info("Context in storage not the expected context, Scope.close was not called correctly. Details:" + " OTel context before: " + OpenTelemetryUtil.getSpanData(before) + ". 
OTel context toAttach: " + spanDataToAttach); } diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/exporter/OtlpExporterTracesConfig.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/exporter/OtlpExporterTracesConfig.java index c45ccd88dd41d8..20aa1cbe976ffc 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/exporter/OtlpExporterTracesConfig.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/exporter/OtlpExporterTracesConfig.java @@ -55,7 +55,7 @@ public interface OtlpExporterTracesConfig { * OTLP defines the encoding of telemetry data and the protocol used to exchange data between the client and the * server. Depending on the exporter, the available protocols will be different. *

- * Currently, only {@code grpc} and {@code http} are allowed. + * Currently, only {@code grpc} and {@code http/protobuf} are allowed. */ @WithDefault(Protocol.GRPC) Optional protocol(); diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/VertxGrpcExporter.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/VertxGrpcExporter.java index 22a97a73e35d2d..af5206f3c152b6 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/VertxGrpcExporter.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/VertxGrpcExporter.java @@ -6,6 +6,7 @@ import java.time.Duration; import java.util.Collection; import java.util.Map; +import java.util.concurrent.CompletionStage; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import java.util.function.Supplier; @@ -21,6 +22,7 @@ import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.export.SpanExporter; import io.quarkus.vertx.core.runtime.BufferOutputStream; +import io.smallrye.mutiny.Uni; import io.vertx.core.Handler; import io.vertx.core.Vertx; import io.vertx.core.buffer.Buffer; @@ -30,6 +32,7 @@ import io.vertx.grpc.client.GrpcClient; import io.vertx.grpc.client.GrpcClientRequest; import io.vertx.grpc.client.GrpcClientResponse; +import io.vertx.grpc.common.GrpcError; import io.vertx.grpc.common.GrpcStatus; import io.vertx.grpc.common.ServiceName; @@ -42,12 +45,14 @@ final class VertxGrpcExporter implements SpanExporter { private static final String GRPC_MESSAGE = "grpc-message"; private static final Logger internalLogger = Logger.getLogger(VertxGrpcExporter.class.getName()); + private static final int MAX_ATTEMPTS = 3; private final ThrottlingLogger logger = new ThrottlingLogger(internalLogger); // TODO: is there something in JBoss Logging we can use? 
// We only log unimplemented once since it's a configuration issue that won't be recovered. private final AtomicBoolean loggedUnimplemented = new AtomicBoolean(); private final AtomicBoolean isShutdown = new AtomicBoolean(); + private final CompletableResultCode shutdownResult = new CompletableResultCode(); private final String type; private final ExporterMetrics exporterMetrics; private final SocketAddress server; @@ -86,30 +91,41 @@ private CompletableResultCode export(TraceRequestMarshaler marshaler, int numIte exporterMetrics.addSeen(numItems); var result = new CompletableResultCode(); - var onSuccessHandler = new ClientRequestOnSuccessHandler(headers, compressionEnabled, exporterMetrics, marshaler, - loggedUnimplemented, logger, type, numItems, result); - client.request(server) - .onSuccess(onSuccessHandler) - .onFailure(new Handler<>() { - @Override - public void handle(Throwable t) { - // TODO: is there a better way todo retry? - // TODO: should we only retry on a specific errors? 
- - client.request(server) - .onSuccess(onSuccessHandler) - .onFailure(new Handler<>() { - @Override - public void handle(Throwable event) { - failOnClientRequest(numItems, t, result); - } - }); - } - }); + var onSuccessHandler = new ClientRequestOnSuccessHandler(client, server, headers, compressionEnabled, exporterMetrics, + marshaler, + loggedUnimplemented, logger, type, numItems, result, 1); + + initiateSend(client, server, MAX_ATTEMPTS, onSuccessHandler, new Consumer<>() { + @Override + public void accept(Throwable throwable) { + failOnClientRequest(numItems, throwable, result); + } + }); return result; } + private static void initiateSend(GrpcClient client, SocketAddress server, + int numberOfAttempts, + Handler> onSuccessHandler, + Consumer onFailureCallback) { + Uni.createFrom().completionStage(new Supplier>>() { + + @Override + public CompletionStage> get() { + return client.request(server).toCompletionStage(); + } + }).onFailure().retry() + .withBackOff(Duration.ofMillis(100)) + .atMost(numberOfAttempts).subscribe().with( + new Consumer<>() { + @Override + public void accept(GrpcClientRequest request) { + onSuccessHandler.handle(request); + } + }, onFailureCallback); + } + private void failOnClientRequest(int numItems, Throwable t, CompletableResultCode result) { exporterMetrics.addFailed(numItems); logger.log( @@ -136,15 +152,31 @@ public CompletableResultCode flush() { @Override public CompletableResultCode shutdown() { if (!isShutdown.compareAndSet(false, true)) { - logger.log(Level.INFO, "Calling shutdown() multiple times."); - return CompletableResultCode.ofSuccess(); + logger.log(Level.FINE, "Calling shutdown() multiple times."); + return shutdownResult; } - client.close(); - return CompletableResultCode.ofSuccess(); + + client.close() + .onSuccess( + new Handler<>() { + @Override + public void handle(Void event) { + shutdownResult.succeed(); + } + }) + .onFailure(new Handler<>() { + @Override + public void handle(Throwable event) { + 
shutdownResult.fail(); + } + }); + return shutdownResult; } private static final class ClientRequestOnSuccessHandler implements Handler> { + private final GrpcClient client; + private final SocketAddress server; private final Map headers; private final boolean compressionEnabled; private final ExporterMetrics exporterMetrics; @@ -156,7 +188,11 @@ private static final class ClientRequestOnSuccessHandler implements Handler headers, + private final int attemptNumber; + + public ClientRequestOnSuccessHandler(GrpcClient client, + SocketAddress server, + Map headers, boolean compressionEnabled, ExporterMetrics exporterMetrics, TraceRequestMarshaler marshaler, @@ -164,7 +200,10 @@ public ClientRequestOnSuccessHandler(Map headers, ThrottlingLogger logger, String type, int numItems, - CompletableResultCode result) { + CompletableResultCode result, + int attemptNumber) { + this.client = client; + this.server = server; this.headers = headers; this.compressionEnabled = compressionEnabled; this.exporterMetrics = exporterMetrics; @@ -174,6 +213,7 @@ public ClientRequestOnSuccessHandler(Map headers, this.type = type; this.numItems = numItems; this.result = result; + this.attemptNumber = attemptNumber; } @Override @@ -201,20 +241,53 @@ public void handle(GrpcClientRequest request) { request.send(buffer).onSuccess(new Handler<>() { @Override public void handle(GrpcClientResponse response) { - GrpcStatus status = getStatus(response); - if (status == GrpcStatus.OK) { - exporterMetrics.addSuccess(numItems); - result.succeed(); - return; - } - String statusMessage = getStatusMessage(response); - if (statusMessage == null) { - // TODO: this needs investigation, when this happened, the spans actually got to the server, but for some reason no status code was present in the result - exporterMetrics.addSuccess(numItems); - result.succeed(); - return; - } + response.exceptionHandler(new Handler<>() { + @Override + public void handle(Throwable t) { + if (attemptNumber <= MAX_ATTEMPTS) { + // 
retry + initiateSend(client, server, + MAX_ATTEMPTS - attemptNumber, + newAttempt(), + new Consumer<>() { + @Override + public void accept(Throwable throwable) { + failOnClientRequest(numItems, throwable, result); + } + }); + + } else { + exporterMetrics.addFailed(numItems); + logger.log( + Level.SEVERE, + "Failed to export " + + type + + "s. The stream failed. Full error message: " + + t.getMessage()); + result.fail(); + } + } + }).errorHandler(new Handler<>() { + @Override + public void handle(GrpcError error) { + handleError(error.status, response); + } + }).endHandler(new Handler<>() { + @Override + public void handle(Void ignored) { + GrpcStatus status = getStatus(response); + if (status == GrpcStatus.OK) { + exporterMetrics.addSuccess(numItems); + result.succeed(); + } else { + handleError(status, response); + } + } + }); + } + private void handleError(GrpcStatus status, GrpcClientResponse response) { + String statusMessage = getStatusMessage(response); logAppropriateWarning(status, statusMessage); exporterMetrics.addFailed(numItems); result.fail(); @@ -237,12 +310,20 @@ private void logAppropriateWarning(GrpcStatus status, + statusMessage); } else { if (status == null) { - logger.log( - Level.WARNING, - "Failed to export " - + type - + "s. Server responded with error message: " - + statusMessage); + if (statusMessage == null) { + logger.log( + Level.WARNING, + "Failed to export " + + type + + "s. Perhaps the collector does not support collecting traces using grpc? Try configuring 'quarkus.otel.exporter.otlp.traces.protocol=http/protobuf'"); + } else { + logger.log( + Level.WARNING, + "Failed to export " + + type + + "s. 
Server responded with error message: " + + statusMessage); + } } else { logger.log( Level.WARNING, @@ -316,14 +397,27 @@ private String getStatusMessage(GrpcClientResponse response) { }).onFailure(new Handler<>() { @Override public void handle(Throwable t) { - exporterMetrics.addFailed(numItems); - logger.log( - Level.SEVERE, - "Failed to export " - + type - + "s. The request could not be executed. Full error message: " - + t.getMessage()); - result.fail(); + if (attemptNumber <= MAX_ATTEMPTS) { + // retry + initiateSend(client, server, + MAX_ATTEMPTS - attemptNumber, + newAttempt(), + new Consumer<>() { + @Override + public void accept(Throwable throwable) { + failOnClientRequest(numItems, throwable, result); + } + }); + } else { + exporterMetrics.addFailed(numItems); + logger.log( + Level.SEVERE, + "Failed to export " + + type + + "s. The request could not be executed. Full error message: " + + t.getMessage()); + result.fail(); + } } }); } catch (IOException e) { @@ -337,5 +431,21 @@ public void handle(Throwable t) { result.fail(); } } + + private void failOnClientRequest(int numItems, Throwable t, CompletableResultCode result) { + exporterMetrics.addFailed(numItems); + logger.log( + Level.SEVERE, + "Failed to export " + + type + + "s. The request could not be executed. 
Full error message: " + + t.getMessage()); + result.fail(); + } + + public ClientRequestOnSuccessHandler newAttempt() { + return new ClientRequestOnSuccessHandler(client, server, headers, compressionEnabled, exporterMetrics, marshaler, + loggedUnimplemented, logger, type, numItems, result, attemptNumber + 1); + } } } diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/VertxHttpExporter.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/VertxHttpExporter.java index ed09c04c0541ad..bc8472286dae8e 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/VertxHttpExporter.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/VertxHttpExporter.java @@ -8,16 +8,23 @@ import java.time.Duration; import java.util.Collection; import java.util.Map; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; +import java.util.function.Supplier; +import java.util.logging.Level; +import java.util.logging.Logger; import java.util.zip.GZIPOutputStream; import io.opentelemetry.exporter.internal.http.HttpExporter; import io.opentelemetry.exporter.internal.http.HttpSender; import io.opentelemetry.exporter.internal.otlp.traces.TraceRequestMarshaler; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.internal.ThrottlingLogger; import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.export.SpanExporter; import io.quarkus.vertx.core.runtime.BufferOutputStream; +import io.smallrye.mutiny.Uni; import io.vertx.core.AsyncResult; import io.vertx.core.Handler; import io.vertx.core.Vertx; @@ -31,6 +38,11 @@ final class VertxHttpExporter implements SpanExporter { + private static final Logger internalLogger = 
Logger.getLogger(VertxHttpExporter.class.getName()); + private static final ThrottlingLogger logger = new ThrottlingLogger(internalLogger); + + private static final int MAX_ATTEMPTS = 3; + private final HttpExporter delegate; VertxHttpExporter(HttpExporter delegate) { @@ -83,6 +95,9 @@ static final class VertxHttpSender implements HttpSender { this.client = vertx.createHttpClient(httpClientOptions); } + private final AtomicBoolean isShutdown = new AtomicBoolean(); + private final CompletableResultCode shutdownResult = new CompletableResultCode(); + private static String determineBasePath(URI baseUri) { String path = baseUri.getPath(); if (path.isEmpty() || path.equals("/")) { @@ -100,81 +115,188 @@ private static String determineBasePath(URI baseUri) { @Override public void send(Consumer marshaler, int contentLength, - Consumer onResponse, + Consumer onHttpResponseRead, Consumer onError) { - client.request(HttpMethod.POST, basePath + TRACES_PATH) - .onSuccess(new Handler<>() { + String requestURI = basePath + TRACES_PATH; + var clientRequestSuccessHandler = new ClientRequestSuccessHandler(client, requestURI, headers, compressionEnabled, + contentType, + contentLength, onHttpResponseRead, + onError, marshaler, 1); + initiateSend(client, requestURI, MAX_ATTEMPTS, clientRequestSuccessHandler, onError); + } + + private static void initiateSend(HttpClient client, String requestURI, + int numberOfAttempts, + Handler clientRequestSuccessHandler, + Consumer onError) { + Uni.createFrom().completionStage(new Supplier>() { + @Override + public CompletionStage get() { + return client.request(HttpMethod.POST, requestURI).toCompletionStage(); + } + }).onFailure().retry() + .withBackOff(Duration.ofMillis(100)) + .atMost(numberOfAttempts) + .subscribe().with(new Consumer<>() { @Override - public void handle(HttpClientRequest request) { + public void accept(HttpClientRequest request) { + clientRequestSuccessHandler.handle(request); + } + }, onError); + } + + @Override + public 
CompletableResultCode shutdown() { + if (!isShutdown.compareAndSet(false, true)) { + logger.log(Level.FINE, "Calling shutdown() multiple times."); + return shutdownResult; + } + + client.close() + .onSuccess( + new Handler<>() { + @Override + public void handle(Void event) { + shutdownResult.succeed(); + } + }) + .onFailure(new Handler<>() { + @Override + public void handle(Throwable event) { + shutdownResult.fail(); + } + }); + return shutdownResult; + } + + private static class ClientRequestSuccessHandler implements Handler { + private final HttpClient client; + private final String requestURI; + private final Map headers; + private final boolean compressionEnabled; + private final String contentType; + private final int contentLength; + private final Consumer onHttpResponseRead; + private final Consumer onError; + private final Consumer marshaler; + + private final int attemptNumber; - HttpClientRequest clientRequest = request.response(new Handler<>() { + public ClientRequestSuccessHandler(HttpClient client, + String requestURI, Map headers, + boolean compressionEnabled, + String contentType, + int contentLength, + Consumer onHttpResponseRead, + Consumer onError, + Consumer marshaler, + int attemptNumber) { + this.client = client; + this.requestURI = requestURI; + this.headers = headers; + this.compressionEnabled = compressionEnabled; + this.contentType = contentType; + this.contentLength = contentLength; + this.onHttpResponseRead = onHttpResponseRead; + this.onError = onError; + this.marshaler = marshaler; + this.attemptNumber = attemptNumber; + } + + @Override + public void handle(HttpClientRequest request) { + + HttpClientRequest clientRequest = request.response(new Handler<>() { + @Override + public void handle(AsyncResult callResult) { + if (callResult.succeeded()) { + HttpClientResponse clientResponse = callResult.result(); + clientResponse.body(new Handler<>() { @Override - public void handle(AsyncResult callResult) { - if (callResult.succeeded()) { - 
HttpClientResponse clientResponse = callResult.result(); - clientResponse.body(new Handler<>() { + public void handle(AsyncResult bodyResult) { + if (bodyResult.succeeded()) { + if (clientResponse.statusCode() >= 500) { + if (attemptNumber <= MAX_ATTEMPTS) { + // we should retry for 5xx error as they might be recoverable + initiateSend(client, requestURI, + MAX_ATTEMPTS - attemptNumber, + newAttempt(), + onError); + return; + } + } + onHttpResponseRead.accept(new Response() { @Override - public void handle(AsyncResult bodyResult) { - if (bodyResult.succeeded()) { - onResponse.accept(new Response() { - @Override - public int statusCode() { - return clientResponse.statusCode(); - } - - @Override - public String statusMessage() { - return clientResponse.statusMessage(); - } - - @Override - public byte[] responseBody() { - return bodyResult.result().getBytes(); - } - }); - } else { - onError.accept(bodyResult.cause()); - } + public int statusCode() { + return clientResponse.statusCode(); + } + + @Override + public String statusMessage() { + return clientResponse.statusMessage(); + } + + @Override + public byte[] responseBody() { + return bodyResult.result().getBytes(); } }); } else { - onError.accept(callResult.cause()); + if (attemptNumber <= MAX_ATTEMPTS) { + // retry + initiateSend(client, requestURI, + MAX_ATTEMPTS - attemptNumber, + newAttempt(), + onError); + } else { + onError.accept(bodyResult.cause()); + } } } - }) - .putHeader("Content-Type", contentType); - - Buffer buffer = Buffer.buffer(contentLength); - OutputStream os = new BufferOutputStream(buffer); - if (compressionEnabled) { - clientRequest.putHeader("Content-Encoding", "gzip"); - try (var gzos = new GZIPOutputStream(os)) { - marshaler.accept(gzos); - } catch (IOException e) { - throw new IllegalStateException(e); - } + }); + } else { + if (attemptNumber <= MAX_ATTEMPTS) { + // retry + initiateSend(client, requestURI, + MAX_ATTEMPTS - attemptNumber, + newAttempt(), + onError); } else { - 
marshaler.accept(os); + onError.accept(callResult.cause()); } + } + } + }) + .putHeader("Content-Type", contentType); - if (!headers.isEmpty()) { - for (var entry : headers.entrySet()) { - clientRequest.putHeader(entry.getKey(), entry.getValue()); - } - } + Buffer buffer = Buffer.buffer(contentLength); + OutputStream os = new BufferOutputStream(buffer); + if (compressionEnabled) { + clientRequest.putHeader("Content-Encoding", "gzip"); + try (var gzos = new GZIPOutputStream(os)) { + marshaler.accept(gzos); + } catch (IOException e) { + throw new IllegalStateException(e); + } + } else { + marshaler.accept(os); + } - clientRequest.send(buffer); + if (!headers.isEmpty()) { + for (var entry : headers.entrySet()) { + clientRequest.putHeader(entry.getKey(), entry.getValue()); + } + } - } - }) - .onFailure(onError::accept); - } + clientRequest.send(buffer); + } - @Override - public CompletableResultCode shutdown() { - client.close(); - return CompletableResultCode.ofSuccess(); + public ClientRequestSuccessHandler newAttempt() { + return new ClientRequestSuccessHandler(client, requestURI, headers, compressionEnabled, + contentType, contentLength, onHttpResponseRead, + onError, marshaler, attemptNumber + 1); + } } } } diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/cdi/TracerProducer.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/cdi/TracerProducer.java index b17c611cfd6e5b..6c44fb6edbca36 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/cdi/TracerProducer.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/cdi/TracerProducer.java @@ -2,6 +2,10 @@ import static io.quarkus.opentelemetry.runtime.config.build.OTelBuildConfig.INSTRUMENTATION_NAME; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; + import 
jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.context.RequestScoped; import jakarta.enterprise.inject.Produces; @@ -9,7 +13,13 @@ import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.baggage.Baggage; +import io.opentelemetry.api.baggage.BaggageBuilder; +import io.opentelemetry.api.baggage.BaggageEntry; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.StatusCode; import io.opentelemetry.api.trace.Tracer; import io.quarkus.arc.DefaultBean; import io.quarkus.opentelemetry.runtime.tracing.DelayedAttributes; @@ -33,13 +43,93 @@ public Tracer getTracer() { @RequestScoped @DefaultBean public Span getSpan() { - return Span.current(); + return new Span() { + @Override + public Span setAttribute(final AttributeKey key, final T value) { + return Span.current().setAttribute(key, value); + } + + @Override + public Span addEvent(final String name, final Attributes attributes) { + return Span.current().addEvent(name, attributes); + } + + @Override + + public Span addEvent( + final String name, + final Attributes attributes, + final long timestamp, + final TimeUnit unit) { + return Span.current().addEvent(name, attributes, timestamp, unit); + } + + @Override + public Span setStatus(final StatusCode statusCode, final String description) { + return Span.current().setStatus(statusCode, description); + } + + @Override + public Span recordException(final Throwable exception, final Attributes additionalAttributes) { + return Span.current().recordException(exception, additionalAttributes); + } + + @Override + public Span updateName(final String name) { + return Span.current().updateName(name); + } + + @Override + public void end() { + Span.current().end(); + } + + @Override + public void end(final long timestamp, final TimeUnit unit) { + Span.current().end(timestamp, 
unit); + } + + @Override + public SpanContext getSpanContext() { + return Span.current().getSpanContext(); + } + + @Override + public boolean isRecording() { + return Span.current().isRecording(); + } + }; } @Produces @RequestScoped @DefaultBean public Baggage getBaggage() { - return Baggage.current(); + return new Baggage() { + @Override + public int size() { + return Baggage.current().size(); + } + + @Override + public void forEach(final BiConsumer consumer) { + Baggage.current().forEach(consumer); + } + + @Override + public Map asMap() { + return Baggage.current().asMap(); + } + + @Override + public String getEntryValue(final String entryKey) { + return Baggage.current().getEntryValue(entryKey); + } + + @Override + public BaggageBuilder toBuilder() { + return Baggage.current().toBuilder(); + } + }; } } diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/resteasy/OpenTelemetryClassicThreadContext.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/resteasy/OpenTelemetryClassicThreadContext.java new file mode 100644 index 00000000000000..f2f0112822ea51 --- /dev/null +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/resteasy/OpenTelemetryClassicThreadContext.java @@ -0,0 +1,35 @@ +package io.quarkus.opentelemetry.runtime.tracing.intrumentation.resteasy; + +import java.util.HashMap; +import java.util.Map; + +import jakarta.ws.rs.ext.Provider; + +import org.jboss.resteasy.spi.concurrent.ThreadContext; + +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; + +@Provider +public class OpenTelemetryClassicThreadContext implements ThreadContext> { + @Override + public Map capture() { + Map context = new HashMap<>(); + context.put("context", Context.current()); + return context; + } + + @Override + public void push(final Map context) { + Context current = (Context) 
context.get("context"); + Scope scope = current.makeCurrent(); + context.put("scope", scope); + } + + @Override + public void reset(final Map context) { + Scope scope = (Scope) context.get("scope"); + scope.close(); + context.clear(); + } +} \ No newline at end of file diff --git a/extensions/opentelemetry/runtime/src/main/resources/META-INF/services/org.jboss.resteasy.spi.concurrent.ThreadContext b/extensions/opentelemetry/runtime/src/main/resources/META-INF/services/org.jboss.resteasy.spi.concurrent.ThreadContext new file mode 100644 index 00000000000000..c2b7e3c4bf1e70 --- /dev/null +++ b/extensions/opentelemetry/runtime/src/main/resources/META-INF/services/org.jboss.resteasy.spi.concurrent.ThreadContext @@ -0,0 +1 @@ +io.quarkus.opentelemetry.runtime.tracing.intrumentation.resteasy.OpenTelemetryClassicThreadContext diff --git a/extensions/panache/hibernate-orm-panache-common/runtime/src/main/java/io/quarkus/hibernate/orm/panache/common/runtime/CommonPanacheQueryImpl.java b/extensions/panache/hibernate-orm-panache-common/runtime/src/main/java/io/quarkus/hibernate/orm/panache/common/runtime/CommonPanacheQueryImpl.java index 6e4ee88dca82c5..f9dd38ee25652b 100644 --- a/extensions/panache/hibernate-orm-panache-common/runtime/src/main/java/io/quarkus/hibernate/orm/panache/common/runtime/CommonPanacheQueryImpl.java +++ b/extensions/panache/hibernate-orm-panache-common/runtime/src/main/java/io/quarkus/hibernate/orm/panache/common/runtime/CommonPanacheQueryImpl.java @@ -302,7 +302,7 @@ private String countQuery(String selectQuery) { return countQuery; } - return PanacheJpaUtil.getCountQuery(selectQuery); + return PanacheJpaUtil.getFastCountQuery(selectQuery); } @SuppressWarnings("unchecked") diff --git a/extensions/panache/hibernate-orm-panache/deployment/src/test/java/io/quarkus/hibernate/orm/panache/deployment/test/JpaOperationsSortTest.java 
b/extensions/panache/hibernate-orm-panache/deployment/src/test/java/io/quarkus/hibernate/orm/panache/deployment/test/JpaOperationsSortTest.java index 30e13c7a45c5f6..ff9e1f9f751ba5 100644 --- a/extensions/panache/hibernate-orm-panache/deployment/src/test/java/io/quarkus/hibernate/orm/panache/deployment/test/JpaOperationsSortTest.java +++ b/extensions/panache/hibernate-orm-panache/deployment/src/test/java/io/quarkus/hibernate/orm/panache/deployment/test/JpaOperationsSortTest.java @@ -2,9 +2,11 @@ import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import io.quarkus.panache.common.Sort; +import io.quarkus.panache.common.exception.PanacheQueryException; import io.quarkus.panache.hibernate.common.runtime.PanacheJpaUtil; public class JpaOperationsSortTest { @@ -18,7 +20,7 @@ public void testEmptySortByYieldsEmptyString() { @Test public void testSortBy() { Sort sort = Sort.by("foo", "bar"); - assertEquals(" ORDER BY foo , bar", PanacheJpaUtil.toOrderBy(sort)); + assertEquals(" ORDER BY `foo` , `bar`", PanacheJpaUtil.toOrderBy(sort)); } @Test @@ -29,14 +31,48 @@ public void testEmptySortEmptyYieldsEmptyString() { @Test public void testSortByNullsFirst() { - Sort emptySort = Sort.by("foo", Sort.Direction.Ascending, Sort.NullPrecedence.NULLS_FIRST); - assertEquals(" ORDER BY foo NULLS FIRST", PanacheJpaUtil.toOrderBy(emptySort)); + Sort sort = Sort.by("foo", Sort.Direction.Ascending, Sort.NullPrecedence.NULLS_FIRST); + assertEquals(" ORDER BY `foo` NULLS FIRST", PanacheJpaUtil.toOrderBy(sort)); } @Test public void testSortByNullsLast() { - Sort emptySort = Sort.by("foo", Sort.Direction.Descending, Sort.NullPrecedence.NULLS_LAST); - assertEquals(" ORDER BY foo DESC NULLS LAST", PanacheJpaUtil.toOrderBy(emptySort)); + Sort sort = Sort.by("foo", Sort.Direction.Descending, Sort.NullPrecedence.NULLS_LAST); + assertEquals(" ORDER BY `foo` DESC NULLS LAST", PanacheJpaUtil.toOrderBy(sort)); } + 
@Test + public void testSortByColumnWithBacktick() { + Sort sort = Sort.by("jeanne", "d`arc"); + Assertions.assertThrowsExactly(PanacheQueryException.class, () -> PanacheJpaUtil.toOrderBy(sort), + "Sort column name cannot have backticks"); + } + + @Test + public void testSortByQuotedColumn() { + Sort sort = Sort.by("`foo`", "bar"); + assertEquals(" ORDER BY `foo` , `bar`", PanacheJpaUtil.toOrderBy(sort)); + } + + @Test + public void testSortByEmbeddedColumn() { + Sort sort = Sort.by("foo.bar"); + assertEquals(" ORDER BY `foo`.`bar`", PanacheJpaUtil.toOrderBy(sort)); + } + + @Test + public void testSortByQuotedEmbeddedColumn() { + Sort sort1 = Sort.by("foo.`bar`"); + assertEquals(" ORDER BY `foo`.`bar`", PanacheJpaUtil.toOrderBy(sort1)); + Sort sort2 = Sort.by("`foo`.bar"); + assertEquals(" ORDER BY `foo`.`bar`", PanacheJpaUtil.toOrderBy(sort2)); + Sort sort3 = Sort.by("`foo`.`bar`"); + assertEquals(" ORDER BY `foo`.`bar`", PanacheJpaUtil.toOrderBy(sort3)); + } + + @Test + public void testSortByDisabledEscaping() { + Sort sort1 = Sort.by("foo.`bar`").disableEscaping(); + assertEquals(" ORDER BY foo.`bar`", PanacheJpaUtil.toOrderBy(sort1)); + } } diff --git a/extensions/panache/hibernate-orm-panache/runtime/src/main/java/io/quarkus/hibernate/orm/panache/package-info.java b/extensions/panache/hibernate-orm-panache/runtime/src/main/java/io/quarkus/hibernate/orm/panache/package-info.java index a3e78008219455..963c9846594356 100644 --- a/extensions/panache/hibernate-orm-panache/runtime/src/main/java/io/quarkus/hibernate/orm/panache/package-info.java +++ b/extensions/panache/hibernate-orm-panache/runtime/src/main/java/io/quarkus/hibernate/orm/panache/package-info.java @@ -57,31 +57,35 @@ * at the end. *

*

- * If your select query does not start with from, we support the following additional forms: + * If your select query does not start with from, select, or with, we support the + * following additional forms: *

*
    *
  • order by ... which will expand to from EntityName order by ...
  • - *
  • <singleColumnName> (and single parameter) which will expand to - * from EntityName where <singleColumnName> = ?
  • + *
  • <singleAttribute> (and single parameter) which will expand to + * from EntityName where <singleAttribute> = ?
  • + *
  • where <query> will expand to from EntityName where <query> *
  • <query> will expand to from EntityName where <query>
  • *
* + *

* If your update query does not start with update from, we support the following additional forms: *

*
    *
  • from EntityName ... which will expand to update from EntityName ...
  • - *
  • set? <singleColumnName> (and single parameter) which will expand to - * update from EntityName set <singleColumnName> = ?
  • + *
  • set? <singleAttribute> (and single parameter) which will expand to + * update from EntityName set <singleAttribute> = ?
  • *
  • set? <update-query> will expand to * update from EntityName set <update-query> = ?
  • *
* + *

* If your delete query does not start with delete from, we support the following additional forms: *

*
    *
  • from EntityName ... which will expand to delete from EntityName ...
  • - *
  • <singleColumnName> (and single parameter) which will expand to - * delete from EntityName where <singleColumnName> = ?
  • + *
  • <singleAttribute> (and single parameter) which will expand to + * delete from EntityName where <singleAttribute> = ?
  • *
  • <query> will expand to delete from EntityName where <query>
  • *
* diff --git a/extensions/panache/hibernate-reactive-panache-common/runtime/src/main/java/io/quarkus/hibernate/reactive/panache/common/runtime/CommonPanacheQueryImpl.java b/extensions/panache/hibernate-reactive-panache-common/runtime/src/main/java/io/quarkus/hibernate/reactive/panache/common/runtime/CommonPanacheQueryImpl.java index c1e7516c227658..4173dac1973d73 100644 --- a/extensions/panache/hibernate-reactive-panache-common/runtime/src/main/java/io/quarkus/hibernate/reactive/panache/common/runtime/CommonPanacheQueryImpl.java +++ b/extensions/panache/hibernate-reactive-panache-common/runtime/src/main/java/io/quarkus/hibernate/reactive/panache/common/runtime/CommonPanacheQueryImpl.java @@ -299,7 +299,7 @@ private String countQuery(String selectQuery) { if (countQuery != null) { return countQuery; } - return PanacheJpaUtil.getCountQuery(selectQuery); + return PanacheJpaUtil.getFastCountQuery(selectQuery); } @SuppressWarnings({ "unchecked", "rawtypes" }) diff --git a/extensions/panache/hibernate-reactive-panache/runtime/src/main/java/io/quarkus/hibernate/reactive/panache/package-info.java b/extensions/panache/hibernate-reactive-panache/runtime/src/main/java/io/quarkus/hibernate/reactive/panache/package-info.java index 81cd522653c12b..d40888dfc5f5df 100644 --- a/extensions/panache/hibernate-reactive-panache/runtime/src/main/java/io/quarkus/hibernate/reactive/panache/package-info.java +++ b/extensions/panache/hibernate-reactive-panache/runtime/src/main/java/io/quarkus/hibernate/reactive/panache/package-info.java @@ -57,31 +57,35 @@ * at the end. *

*

- * If your select query does not start with from, we support the following additional forms: + * If your select query does not start with from, select, or with, we support the + * following additional forms: *

*
    *
  • order by ... which will expand to from EntityName order by ...
  • - *
  • <singleColumnName> (and single parameter) which will expand to - * from EntityName where <singleColumnName> = ?
  • + *
  • <singleAttribute> (and single parameter) which will expand to + * from EntityName where <singleAttribute> = ?
  • + *
  • where <query> will expand to from EntityName where <query> *
  • <query> will expand to from EntityName where <query>
  • *
* + *

* If your update query does not start with update from, we support the following additional forms: *

*
    *
  • from EntityName ... which will expand to update from EntityName ...
  • - *
  • set? <singleColumnName> (and single parameter) which will expand to - * update from EntityName set <singleColumnName> = ?
  • + *
  • set? <singleAttribute> (and single parameter) which will expand to + * update from EntityName set <singleAttribute> = ?
  • *
  • set? <update-query> will expand to * update from EntityName set <update-query> = ?
  • *
* + *

* If your delete query does not start with delete from, we support the following additional forms: *

*
    *
  • from EntityName ... which will expand to delete from EntityName ...
  • - *
  • <singleColumnName> (and single parameter) which will expand to - * delete from EntityName where <singleColumnName> = ?
  • + *
  • <singleAttribute> (and single parameter) which will expand to + * delete from EntityName where <singleAttribute> = ?
  • *
  • <query> will expand to delete from EntityName where <query>
  • *
* diff --git a/extensions/panache/mongodb-panache-common/runtime/src/main/java/io/quarkus/mongodb/panache/common/binder/MongoParserVisitor.java b/extensions/panache/mongodb-panache-common/runtime/src/main/java/io/quarkus/mongodb/panache/common/binder/MongoParserVisitor.java index 7af3a4cfbc5d96..fc3294487c00bf 100644 --- a/extensions/panache/mongodb-panache-common/runtime/src/main/java/io/quarkus/mongodb/panache/common/binder/MongoParserVisitor.java +++ b/extensions/panache/mongodb-panache-common/runtime/src/main/java/io/quarkus/mongodb/panache/common/binder/MongoParserVisitor.java @@ -3,6 +3,13 @@ import java.util.Map; import io.quarkus.panacheql.internal.HqlParser; +import io.quarkus.panacheql.internal.HqlParser.ComparisonPredicateContext; +import io.quarkus.panacheql.internal.HqlParser.GroupedExpressionContext; +import io.quarkus.panacheql.internal.HqlParser.GroupedPredicateContext; +import io.quarkus.panacheql.internal.HqlParser.NamedParameterContext; +import io.quarkus.panacheql.internal.HqlParser.ParameterContext; +import io.quarkus.panacheql.internal.HqlParser.PositionalParameterContext; +import io.quarkus.panacheql.internal.HqlParser.StandardFunctionContext; import io.quarkus.panacheql.internal.HqlParserBaseVisitor; class MongoParserVisitor extends HqlParserBaseVisitor { @@ -38,18 +45,28 @@ public String visitOrPredicate(HqlParser.OrPredicateContext ctx) { } @Override - public String visitEqualityPredicate(HqlParser.EqualityPredicateContext ctx) { - return ctx.expression(0).accept(this) + ":" + ctx.expression(1).accept(this); - } - - @Override - public String visitInequalityPredicate(HqlParser.InequalityPredicateContext ctx) { - return ctx.expression(0).accept(this) + ":{'$ne':" + ctx.expression(1).accept(this) + "}"; - } - - @Override - public String visitLessThanOrEqualPredicate(HqlParser.LessThanOrEqualPredicateContext ctx) { - return ctx.expression(0).accept(this) + ":{'$lte':" + ctx.expression(1).accept(this) + "}"; + public String 
visitComparisonPredicate(ComparisonPredicateContext ctx) { + String lhs = ctx.expression(0).accept(this); + String rhs = ctx.expression(1).accept(this); + if (ctx.comparisonOperator().EQUAL() != null) { + return lhs + ":" + rhs; + } + if (ctx.comparisonOperator().NOT_EQUAL() != null) { + return lhs + ":{'$ne':" + rhs + "}"; + } + if (ctx.comparisonOperator().GREATER() != null) { + return lhs + ":{'$gt':" + rhs + "}"; + } + if (ctx.comparisonOperator().GREATER_EQUAL() != null) { + return lhs + ":{'$gte':" + rhs + "}"; + } + if (ctx.comparisonOperator().LESS() != null) { + return lhs + ":{'$lt':" + rhs + "}"; + } + if (ctx.comparisonOperator().LESS_EQUAL() != null) { + return lhs + ":{'$lte':" + rhs + "}"; + } + return super.visitComparisonPredicate(ctx); } @Override @@ -64,33 +81,47 @@ public String visitLikePredicate(HqlParser.LikePredicateContext ctx) { } @Override - public String visitGreaterThanPredicate(HqlParser.GreaterThanPredicateContext ctx) { - return ctx.expression(0).accept(this) + ":{'$gt':" + ctx.expression(1).accept(this) + "}"; + public String visitIsNullPredicate(HqlParser.IsNullPredicateContext ctx) { + boolean exists = ctx.NOT() != null; + return ctx.expression().accept(this) + ":{'$exists':" + exists + "}"; + } + + @Override + public String visitLiteralExpression(HqlParser.LiteralExpressionContext ctx) { + String text = ctx.getText(); + // FIXME: this only really supports text literals + if (ctx.literal().STRING_LITERAL() != null) { + text = text.substring(1, text.length() - 1); + } + return CommonQueryBinder.escape(text); } @Override - public String visitLessThanPredicate(HqlParser.LessThanPredicateContext ctx) { - return ctx.expression(0).accept(this) + ":{'$lt':" + ctx.expression(1).accept(this) + "}"; + public String visitNamedParameter(NamedParameterContext ctx) { + return visitParameter(ctx); } @Override - public String visitGreaterThanOrEqualPredicate(HqlParser.GreaterThanOrEqualPredicateContext ctx) { - return 
ctx.expression(0).accept(this) + ":{'$gte':" + ctx.expression(1).accept(this) + "}"; + public String visitPositionalParameter(PositionalParameterContext ctx) { + return visitParameter(ctx); } @Override - public String visitIsNullPredicate(HqlParser.IsNullPredicateContext ctx) { - boolean exists = ctx.NOT() != null; - return ctx.expression().accept(this) + ":{'$exists':" + exists + "}"; + public String visitParameterExpression(HqlParser.ParameterExpressionContext ctx) { + return visitParameter(ctx.parameter()); } @Override - public String visitLiteralExpression(HqlParser.LiteralExpressionContext ctx) { - return CommonQueryBinder.escape(ctx.getText()); + public String visitGroupedExpression(GroupedExpressionContext ctx) { + return ctx.expression().accept(this); } @Override - public String visitParameterExpression(HqlParser.ParameterExpressionContext ctx) { + public String visitGroupedPredicate(GroupedPredicateContext ctx) { + return ctx.predicate().accept(this); + } + + private String visitParameter(ParameterContext ctx) { // this will match parameters used by PanacheQL : '?1' for index based or ':key' for named one. 
if (parameterMaps.containsKey(ctx.getText())) { Object value = parameterMaps.get(ctx.getText()); @@ -102,9 +133,20 @@ public String visitParameterExpression(HqlParser.ParameterExpressionContext ctx) } @Override - public String visitPathExpression(HqlParser.PathExpressionContext ctx) { + public String visitGeneralPathExpression(HqlParser.GeneralPathExpressionContext ctx) { + String identifier = unquote(ctx.getText()); // this is the name of the field, we apply replacement and escape with ' - return "'" + replacementMap.getOrDefault(ctx.getText(), ctx.getText()) + "'"; + return "'" + replacementMap.getOrDefault(identifier, identifier) + "'"; + } + + /** + * Removes backticks for quoted identifiers + */ + private String unquote(String text) { + if (text.startsWith("`") && text.endsWith("`") && text.length() >= 2) { + return text.substring(1, text.length() - 1); + } + return text; } @Override @@ -115,4 +157,10 @@ public String visitInPredicate(HqlParser.InPredicateContext ctx) { .append("]}"); return sb.toString(); } + + // Turn new date functions such as instant into regular fields, to not break existing queries + @Override + public String visitStandardFunction(StandardFunctionContext ctx) { + return "'" + ctx.getText() + "'"; + } } diff --git a/extensions/panache/mongodb-panache-common/runtime/src/test/java/io/quarkus/mongodb/panache/common/runtime/MongoOperationsTest.java b/extensions/panache/mongodb-panache-common/runtime/src/test/java/io/quarkus/mongodb/panache/common/runtime/MongoOperationsTest.java index 1b84561ffdd26c..14089ccafae324 100644 --- a/extensions/panache/mongodb-panache-common/runtime/src/test/java/io/quarkus/mongodb/panache/common/runtime/MongoOperationsTest.java +++ b/extensions/panache/mongodb-panache-common/runtime/src/test/java/io/quarkus/mongodb/panache/common/runtime/MongoOperationsTest.java @@ -95,6 +95,14 @@ public void testBindShorthandFilter() { //test field replacement query = operations.bindFilter(DemoObj.class, "property", new Object[] 
{ "a value" }); assertEquals("{'value':'a value'}", query); + + // keywords (quoted) + query = operations.bindFilter(Object.class, "`instant` = ?1", new Object[] { "a value" }); + assertEquals("{'instant':'a value'}", query); + + // keywords (unquoted) + query = operations.bindFilter(Object.class, "instant = ?1", new Object[] { "a value" }); + assertEquals("{'instant':'a value'}", query); } private Object toDate(LocalDateTime of) { @@ -285,12 +293,12 @@ public void testBindEnhancedFilterByIndex() { assertEquals("{'field':{'$in':['f1', 'f2']},'isOk':true}", query); query = operations.bindFilter(DemoObj.class, - "field in ?1 and property = ?2 or property = ?3", + "field in ?1 and (property = ?2 or property = ?3)", new Object[] { list, "jpg", "gif" }); assertEquals("{'field':{'$in':['f1', 'f2']},'$or':[{'value':'jpg'},{'value':'gif'}]}", query); query = operations.bindFilter(DemoObj.class, - "field in ?1 and isOk = ?2 and property = ?3 or property = ?4", + "field in ?1 and isOk = ?2 and (property = ?3 or property = ?4)", new Object[] { list, true, "jpg", "gif" }); assertEquals("{'field':{'$in':['f1', 'f2']},'isOk':true,'$or':[{'value':'jpg'},{'value':'gif'}]}", query); } @@ -361,12 +369,12 @@ public void testBindEnhancedFilterByName() { assertEquals("{'field':{'$in':['f1', 'f2']},'isOk':true}", query); query = operations.bindFilter(DemoObj.class, - "field in :fields and property = :p1 or property = :p2", + "field in :fields and (property = :p1 or property = :p2)", Parameters.with("fields", list).and("p1", "jpg").and("p2", "gif").map()); assertEquals("{'field':{'$in':['f1', 'f2']},'$or':[{'value':'jpg'},{'value':'gif'}]}", query); query = operations.bindFilter(DemoObj.class, - "field in :fields and isOk = :isOk and property = :p1 or property = :p2", + "field in :fields and isOk = :isOk and (property = :p1 or property = :p2)", Parameters.with("fields", list) .and("isOk", true) .and("p1", "jpg") diff --git 
a/extensions/panache/mongodb-panache-kotlin/runtime/src/main/kotlin/io/quarkus/mongodb/panache/kotlin/Panache.kt b/extensions/panache/mongodb-panache-kotlin/runtime/src/main/kotlin/io/quarkus/mongodb/panache/kotlin/Panache.kt index 817c55f54bd434..6bf79027759365 100644 --- a/extensions/panache/mongodb-panache-kotlin/runtime/src/main/kotlin/io/quarkus/mongodb/panache/kotlin/Panache.kt +++ b/extensions/panache/mongodb-panache-kotlin/runtime/src/main/kotlin/io/quarkus/mongodb/panache/kotlin/Panache.kt @@ -1,6 +1,6 @@ package io.quarkus.mongodb.panache.kotlin -import com.mongodb.session.ClientSession +import com.mongodb.client.ClientSession import io.quarkus.mongodb.panache.kotlin.runtime.KotlinMongoOperations object Panache { diff --git a/extensions/panache/mongodb-panache/runtime/src/main/java/io/quarkus/mongodb/panache/Panache.java b/extensions/panache/mongodb-panache/runtime/src/main/java/io/quarkus/mongodb/panache/Panache.java index 2e1f900dee3675..e84fa4cb2201dc 100644 --- a/extensions/panache/mongodb-panache/runtime/src/main/java/io/quarkus/mongodb/panache/Panache.java +++ b/extensions/panache/mongodb-panache/runtime/src/main/java/io/quarkus/mongodb/panache/Panache.java @@ -1,6 +1,6 @@ package io.quarkus.mongodb.panache; -import com.mongodb.session.ClientSession; +import com.mongodb.client.ClientSession; import io.quarkus.mongodb.panache.runtime.JavaMongoOperations; diff --git a/extensions/panache/panache-common/runtime/src/main/java/io/quarkus/panache/common/Sort.java b/extensions/panache/panache-common/runtime/src/main/java/io/quarkus/panache/common/Sort.java index d19ca3a7904d2b..a503da211661c4 100644 --- a/extensions/panache/panache-common/runtime/src/main/java/io/quarkus/panache/common/Sort.java +++ b/extensions/panache/panache-common/runtime/src/main/java/io/quarkus/panache/common/Sort.java @@ -100,6 +100,7 @@ public void setNullPrecedence(NullPrecedence nullPrecedence) { } private List columns = new ArrayList<>(); + private boolean escapingEnabled = true; 
private Sort() { } @@ -293,6 +294,16 @@ public Sort and(String name, Direction direction, NullPrecedence nullPrecedence) return this; } + /** + * Disables escaping of column names with a backticks during HQL Order By clause generation + * + * @return this instance, modified. + */ + public Sort disableEscaping() { + escapingEnabled = false; + return this; + } + /** * Get the sort columns * @@ -311,4 +322,8 @@ public List getColumns() { public static Sort empty() { return by(); } + + public boolean isEscapingEnabled() { + return escapingEnabled; + } } diff --git a/extensions/panache/panache-hibernate-common/runtime/pom.xml b/extensions/panache/panache-hibernate-common/runtime/pom.xml index dd0b52b346c16d..d2adbad237ab9b 100644 --- a/extensions/panache/panache-hibernate-common/runtime/pom.xml +++ b/extensions/panache/panache-hibernate-common/runtime/pom.xml @@ -24,6 +24,14 @@ io.quarkus quarkus-panache-common
+ + org.hibernate.orm + hibernate-core + + + org.antlr + antlr4-runtime + jakarta.persistence jakarta.persistence-api diff --git a/extensions/panache/panache-hibernate-common/runtime/src/main/java/io/quarkus/panache/hibernate/common/runtime/CountParserVisitor.java b/extensions/panache/panache-hibernate-common/runtime/src/main/java/io/quarkus/panache/hibernate/common/runtime/CountParserVisitor.java new file mode 100644 index 00000000000000..5ac893a4545c6c --- /dev/null +++ b/extensions/panache/panache-hibernate-common/runtime/src/main/java/io/quarkus/panache/hibernate/common/runtime/CountParserVisitor.java @@ -0,0 +1,108 @@ +package io.quarkus.panache.hibernate.common.runtime; + +import org.antlr.v4.runtime.tree.TerminalNode; +import org.hibernate.grammars.hql.HqlParser.JoinContext; +import org.hibernate.grammars.hql.HqlParser.QueryContext; +import org.hibernate.grammars.hql.HqlParser.QueryOrderContext; +import org.hibernate.grammars.hql.HqlParser.SelectClauseContext; +import org.hibernate.grammars.hql.HqlParser.SimpleQueryGroupContext; +import org.hibernate.grammars.hql.HqlParserBaseVisitor; + +public class CountParserVisitor extends HqlParserBaseVisitor { + + private int inSimpleQueryGroup; + private StringBuilder sb = new StringBuilder(); + + @Override + public String visitSimpleQueryGroup(SimpleQueryGroupContext ctx) { + inSimpleQueryGroup++; + try { + return super.visitSimpleQueryGroup(ctx); + } finally { + inSimpleQueryGroup--; + } + } + + @Override + public String visitQuery(QueryContext ctx) { + super.visitQuery(ctx); + if (inSimpleQueryGroup == 1 && ctx.selectClause() == null) { + // insert a count because there's no select + sb.append(" select count( * )"); + } + return null; + } + + @Override + public String visitSelectClause(SelectClauseContext ctx) { + if (inSimpleQueryGroup == 1) { + if (ctx.SELECT() != null) { + ctx.SELECT().accept(this); + } + if (ctx.DISTINCT() != null) { + sb.append(" count("); + ctx.DISTINCT().accept(this); + if 
(ctx.selectionList().children.size() != 1) { + // FIXME: error message should include query + throw new RuntimeException("Cannot count on more than one column"); + } + ctx.selectionList().children.get(0).accept(this); + sb.append(" )"); + } else { + sb.append(" count( * )"); + } + } else { + super.visitSelectClause(ctx); + } + return null; + } + + @Override + public String visitJoin(JoinContext ctx) { + if (inSimpleQueryGroup == 1 && ctx.FETCH() != null) { + // ignore fetch joins for main query + return null; + } + return super.visitJoin(ctx); + } + + @Override + public String visitQueryOrder(QueryOrderContext ctx) { + if (inSimpleQueryGroup == 1) { + // ignore order/limit/offset for main query + return null; + } + return super.visitQueryOrder(ctx); + } + + @Override + public String visitTerminal(TerminalNode node) { + append(node.getText()); + return null; + } + + @Override + protected String defaultResult() { + return null; + } + + @Override + protected String aggregateResult(String aggregate, String nextResult) { + if (nextResult != null) { + append(nextResult); + } + return null; + } + + private void append(String nextResult) { + // don't add space at start, or around dots + if (!sb.isEmpty() && sb.charAt(sb.length() - 1) != '.' 
&& !nextResult.equals(".")) { + sb.append(" "); + } + sb.append(nextResult); + } + + public String result() { + return sb.toString(); + } +} \ No newline at end of file diff --git a/extensions/panache/panache-hibernate-common/runtime/src/main/java/io/quarkus/panache/hibernate/common/runtime/PanacheJpaUtil.java b/extensions/panache/panache-hibernate-common/runtime/src/main/java/io/quarkus/panache/hibernate/common/runtime/PanacheJpaUtil.java index 3029a33398242c..13230c3c28c2de 100644 --- a/extensions/panache/panache-hibernate-common/runtime/src/main/java/io/quarkus/panache/hibernate/common/runtime/PanacheJpaUtil.java +++ b/extensions/panache/panache-hibernate-common/runtime/src/main/java/io/quarkus/panache/hibernate/common/runtime/PanacheJpaUtil.java @@ -3,6 +3,12 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.antlr.v4.runtime.CharStreams; +import org.antlr.v4.runtime.CommonTokenStream; +import org.hibernate.grammars.hql.HqlLexer; +import org.hibernate.grammars.hql.HqlParser; +import org.hibernate.grammars.hql.HqlParser.SelectStatementContext; + import io.quarkus.panache.common.Sort; import io.quarkus.panache.common.exception.PanacheQueryException; @@ -17,10 +23,32 @@ public class PanacheJpaUtil { static final Pattern FROM_PATTERN = Pattern.compile("^\\s*FROM\\s+.*", Pattern.CASE_INSENSITIVE | Pattern.DOTALL); - public static String getCountQuery(String query) { + // match a FETCH + static final Pattern FETCH_PATTERN = Pattern.compile(".*\\s+FETCH\\s+.*", + Pattern.CASE_INSENSITIVE | Pattern.DOTALL); + + // match a lone SELECT + static final Pattern LONE_SELECT_PATTERN = Pattern.compile(".*SELECT\\s+.*", + Pattern.CASE_INSENSITIVE | Pattern.DOTALL); + + // match a leading WITH + static final Pattern WITH_PATTERN = Pattern.compile("^\\s*WITH\\s+.*", + Pattern.CASE_INSENSITIVE | Pattern.DOTALL); + + /** + * This turns an HQL (already expanded from Panache-QL) query into a count query, using text manipulation + * if we can, because it's 
faster, or fall back to using the ORM HQL parser in {@link #getCountQueryUsingParser(String)} + */ + public static String getFastCountQuery(String query) { // try to generate a good count query from the existing query - Matcher selectMatcher = SELECT_PATTERN.matcher(query); String countQuery; + // there are no fast ways to get rid of fetches, or WITH + if (FETCH_PATTERN.matcher(query).matches() + || WITH_PATTERN.matcher(query).matches()) { + return getCountQueryUsingParser(query); + } + // if it starts with select, we can optimise + Matcher selectMatcher = SELECT_PATTERN.matcher(query); if (selectMatcher.matches()) { // this one cannot be null String firstSelection = selectMatcher.group(1).trim(); @@ -36,6 +64,9 @@ public static String getCountQuery(String query) { // it's not distinct, forget the column list countQuery = "SELECT COUNT(*) " + selectMatcher.group(3); } + } else if (LONE_SELECT_PATTERN.matcher(query).matches()) { + // a select anywhere else in there might be tricky + return getCountQueryUsingParser(query); } else if (FROM_PATTERN.matcher(query).matches()) { countQuery = "SELECT COUNT(*) " + query; } else { @@ -51,6 +82,20 @@ public static String getCountQuery(String query) { return countQuery; } + /** + * This turns an HQL (already expanded from Panache-QL) query into a count query, using the + * ORM HQL parser. Slow version, see {@link #getFastCountQuery(String)} for the fast version. + */ + public static String getCountQueryUsingParser(String query) { + HqlLexer lexer = new HqlLexer(CharStreams.fromString(query)); + CommonTokenStream tokens = new CommonTokenStream(lexer); + HqlParser parser = new HqlParser(tokens); + SelectStatementContext statement = parser.selectStatement(); + CountParserVisitor visitor = new CountParserVisitor(); + statement.accept(visitor); + return visitor.result(); + } + public static String getEntityName(Class entityClass) { // FIXME: not true? 
return entityClass.getName(); @@ -67,10 +112,13 @@ public static String createFindQuery(Class entityClass, String query, int par } String trimmedLc = trimmed.toLowerCase(); - if (trimmedLc.startsWith("from ") || trimmedLc.startsWith("select ")) { + if (trimmedLc.startsWith("from ") + || trimmedLc.startsWith("select ") + || trimmedLc.startsWith("with ")) { return query; } - if (trimmedLc.startsWith("order by ")) { + if (trimmedLc.startsWith("order by ") + || trimmedLc.startsWith("where ")) { return "FROM " + getEntityName(entityClass) + " " + query; } if (trimmedLc.indexOf(' ') == -1 && trimmedLc.indexOf('=') == -1 && paramCount == 1) { @@ -95,9 +143,17 @@ public static String createCountQuery(Class entityClass, String query, int pa return "SELECT COUNT(*) FROM " + getEntityName(entityClass); String trimmedLc = trimmed.toLowerCase(); + // assume these have valid select clauses and let them through + if (trimmedLc.startsWith("select ") + || trimmedLc.startsWith("with ")) { + return query; + } if (trimmedLc.startsWith("from ")) { return "SELECT COUNT(*) " + query; } + if (trimmedLc.startsWith("where ")) { + return "SELECT COUNT(*) FROM " + getEntityName(entityClass) + " " + query; + } if (trimmedLc.startsWith("order by ")) { // ignore it return "SELECT COUNT(*) FROM " + getEntityName(entityClass); @@ -175,7 +231,11 @@ public static String toOrderBy(Sort sort) { Sort.Column column = sort.getColumns().get(i); if (i > 0) sb.append(" , "); - sb.append(column.getName()); + if (sort.isEscapingEnabled()) { + sb.append(escapeColumnName(column.getName())); + } else { + sb.append(column.getName()); + } if (column.getDirection() != Sort.Direction.Ascending) { sb.append(" DESC"); } @@ -191,4 +251,30 @@ public static String toOrderBy(Sort sort) { } return sb.toString(); } + + private static StringBuilder escapeColumnName(String columnName) { + StringBuilder sb = new StringBuilder(); + String[] path = columnName.split("\\."); + for (int j = 0; j < path.length; j++) { + if (j > 0) + 
sb.append('.'); + sb.append('`').append(unquoteColumnName(path[j])).append('`'); + } + return sb; + } + + private static String unquoteColumnName(String columnName) { + String unquotedColumnName; + //Note HQL uses backticks to escape/quote special words that are used as identifiers + if (columnName.charAt(0) == '`' && columnName.charAt(columnName.length() - 1) == '`') { + unquotedColumnName = columnName.substring(1, columnName.length() - 1); + } else { + unquotedColumnName = columnName; + } + // Note we're not dealing with columns but with entity attributes so no backticks expected in unquoted column name + if (unquotedColumnName.indexOf('`') >= 0) { + throw new PanacheQueryException("Sort column name cannot have backticks"); + } + return unquotedColumnName; + } } diff --git a/extensions/panache/panache-hibernate-common/runtime/src/test/java/io/quarkus/panache/hibernate/common/runtime/CountTest.java b/extensions/panache/panache-hibernate-common/runtime/src/test/java/io/quarkus/panache/hibernate/common/runtime/CountTest.java new file mode 100644 index 00000000000000..d285867a8b3fb2 --- /dev/null +++ b/extensions/panache/panache-hibernate-common/runtime/src/test/java/io/quarkus/panache/hibernate/common/runtime/CountTest.java @@ -0,0 +1,80 @@ +package io.quarkus.panache.hibernate.common.runtime; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class CountTest { + @Test + public void testParser() { + // one column, order/limit/offset + assertCountQueryUsingParser("select count( * ) from bar", "select foo from bar order by foo, bar ASC limit 2 offset 3"); + // two columns + assertCountQueryUsingParser("select count( * ) from bar", "select foo,gee from bar"); + // one column distinct + assertCountQueryUsingParser("select count( distinct foo ) from bar", "select distinct foo from bar"); + // two columns distinct + Assertions.assertThrows(RuntimeException.class, + () -> assertCountQueryUsingParser("XX", "select distinct foo,gee from 
bar")); + // nested order by not touched + assertCountQueryUsingParser("select count( * ) from ( from entity order by id )", + "select foo from (from entity order by id) order by foo, bar ASC"); + // what happens to literals? + assertCountQueryUsingParser("select count( * ) from bar where some = 2 and other = '23'", + "select foo from bar where some = 2 and other = '23'"); + // fetches are gone + assertCountQueryUsingParser("select count( * ) from bar b", "select foo from bar b left join fetch b.things"); + // non-fetches remain + assertCountQueryUsingParser("select count( * ) from bar b left join b.things", + "select foo from bar b left join b.things"); + + // inverted select + assertCountQueryUsingParser("from bar select count( * )", "from bar select foo"); + // from without select + assertCountQueryUsingParser("from bar select count( * )", "from bar"); + + // CTE + assertFastCountQuery("WITH id AS ( SELECT p.id AS pid FROM Person2 AS p ) SELECT count( * ) FROM Person2 p", + "WITH id AS (SELECT p.id AS pid FROM Person2 AS p) SELECT p FROM Person2 p"); + } + + @Test + public void testFastVersion() { + // one column, order/limit/offset + assertFastCountQuery("SELECT COUNT(*) from bar", "select foo from bar order by foo, bar ASC limit 2 offset 3"); + // two columns + assertFastCountQuery("SELECT COUNT(*) from bar", "select foo,gee from bar"); + // one column distinct + assertFastCountQuery("SELECT COUNT(distinct foo) from bar", "select distinct foo from bar"); + // two columns distinct + Assertions.assertThrows(RuntimeException.class, () -> assertFastCountQuery("XX", "select distinct foo,gee from bar")); + // nested order by not touched + assertFastCountQuery("SELECT COUNT(*) from (from entity order by id)", + "select foo from (from entity order by id) order by foo, bar ASC"); + // what happens to literals? 
+ assertFastCountQuery("SELECT COUNT(*) from bar where some = 2 and other = '23'", + "select foo from bar where some = 2 and other = '23'"); + // fetches are gone + assertFastCountQuery("select count( * ) from bar b", "select foo from bar b left join fetch b.things"); + // non-fetches remain + assertFastCountQuery("SELECT COUNT(*) from bar b left join b.things", "select foo from bar b left join b.things"); + + // inverted select + assertFastCountQuery("from bar select count( * )", "from bar select foo"); + // from without select + assertFastCountQuery("SELECT COUNT(*) from bar", "from bar"); + + // CTE + assertFastCountQuery("WITH id AS ( SELECT p.id AS pid FROM Person2 AS p ) SELECT count( * ) FROM Person2 p", + "WITH id AS (SELECT p.id AS pid FROM Person2 AS p) SELECT p FROM Person2 p"); + } + + private void assertCountQueryUsingParser(String expected, String selectQuery) { + String countQuery = PanacheJpaUtil.getCountQueryUsingParser(selectQuery); + Assertions.assertEquals(expected, countQuery); + } + + private void assertFastCountQuery(String expected, String selectQuery) { + String countQuery = PanacheJpaUtil.getFastCountQuery(selectQuery); + Assertions.assertEquals(expected, countQuery); + } +} diff --git a/extensions/panache/panache-mock/pom.xml b/extensions/panache/panache-mock/pom.xml index d33d2536e2e663..d6db8863936a46 100644 --- a/extensions/panache/panache-mock/pom.xml +++ b/extensions/panache/panache-mock/pom.xml @@ -13,10 +13,6 @@ Quarkus - Panache - Mock Mocking with Panache - - true - - io.quarkus @@ -47,6 +43,31 @@ -proc:none + + + org.apache.maven.plugins + maven-enforcer-plugin + + + enforce + + + + + classpath:enforcer-rules/quarkus-banned-dependencies.xml + + + classpath:enforcer-rules/quarkus-banned-dependencies-okhttp.xml + + + + + + diff --git a/extensions/panache/panacheql/.gitignore b/extensions/panache/panacheql/.gitignore new file mode 100644 index 00000000000000..26c2d26a8f47de --- /dev/null +++ 
b/extensions/panache/panacheql/.gitignore @@ -0,0 +1,3 @@ +gen/** +**/internal/gen/** +/src/main/antlr4/io/quarkus/panacheql/internal/HqlLexer.tokens diff --git a/extensions/panache/panacheql/src/main/antlr4/io/quarkus/panacheql/internal/HqlLexer.g4 b/extensions/panache/panacheql/src/main/antlr4/io/quarkus/panacheql/internal/HqlLexer.g4 index cff2b1708253a9..6e8e8636f5779f 100644 --- a/extensions/panache/panacheql/src/main/antlr4/io/quarkus/panacheql/internal/HqlLexer.g4 +++ b/extensions/panache/panacheql/src/main/antlr4/io/quarkus/panacheql/internal/HqlLexer.g4 @@ -10,70 +10,95 @@ lexer grammar HqlLexer; */ } -WS : ( ' ' | '\t' | '\f' | EOL ) -> skip; +WS : WS_CHAR+ -> skip; fragment -EOL : [\r\n]+; +WS_CHAR : [ \f\t\r\n]; -INTEGER_LITERAL : INTEGER_NUMBER ; +COMMENT : '/*' (~'*' | '*' ~'/' )* '*/' -> skip; fragment -INTEGER_NUMBER : ('0' | '1'..'9' '0'..'9'*) ; +DIGIT : [0-9]; -LONG_LITERAL : INTEGER_NUMBER ('l'|'L'); +fragment +HEX_DIGIT : [0-9a-fA-F]; -BIG_INTEGER_LITERAL : INTEGER_NUMBER ('bi'|'BI') ; +fragment +EXPONENT : [eE] [+-]? DIGIT+; -HEX_LITERAL : '0' ('x'|'X') HEX_DIGIT+ ('l'|'L')? ; +fragment +LONG_SUFFIX : [lL]; fragment -HEX_DIGIT : ('0'..'9'|'a'..'f'|'A'..'F') ; +FLOAT_SUFFIX : [fF]; -OCTAL_LITERAL : '0' ('0'..'7')+ ('l'|'L')? ; +fragment +DOUBLE_SUFFIX : [dD]; -FLOAT_LITERAL : FLOATING_POINT_NUMBER ('f'|'F')? ; +fragment +BIG_DECIMAL_SUFFIX : [bB] [dD]; + +fragment +BIG_INTEGER_SUFFIX : [bB] [iI]; + +// Although this is not 100% correct because this accepts leading zeros, +// we stick to this because temporal literals use this rule for simplicity. +// Since we don't support octal literals, this shouldn't really be a big issue +fragment +INTEGER_NUMBER + : DIGIT+ + ; fragment FLOATING_POINT_NUMBER - : ('0'..'9')+ '.' ('0'..'9')* EXPONENT? - | '.' ('0'..'9')+ EXPONENT? - | ('0'..'9')+ EXPONENT - | ('0'..'9')+ + : DIGIT+ '.' DIGIT* EXPONENT? + | '.' DIGIT+ EXPONENT? 
+ | DIGIT+ EXPONENT + | DIGIT+ ; -DOUBLE_LITERAL : FLOATING_POINT_NUMBER ('d'|'D') ; +INTEGER_LITERAL : INTEGER_NUMBER ('_' INTEGER_NUMBER)*; -BIG_DECIMAL_LITERAL : FLOATING_POINT_NUMBER ('bd'|'BD') ; +LONG_LITERAL : INTEGER_NUMBER ('_' INTEGER_NUMBER)* LONG_SUFFIX; -fragment -EXPONENT : ('e'|'E') ('+'|'-')? ('0'..'9')+ ; +FLOAT_LITERAL : FLOATING_POINT_NUMBER FLOAT_SUFFIX; -CHARACTER_LITERAL - : '\'' ( ESCAPE_SEQUENCE | ~('\''|'\\') ) '\'' {setText(getText().substring(1, getText().length()-1));} - ; +DOUBLE_LITERAL : FLOATING_POINT_NUMBER DOUBLE_SUFFIX?; -STRING_LITERAL - : '"' ( ESCAPE_SEQUENCE | ~('\\'|'"') )* '"' {setText(getText().substring(1, getText().length()-1));} - | ('\'' ( ESCAPE_SEQUENCE | ~('\\'|'\'') )* '\'')+ {setText(getText().substring(1, getText().length()-1).replace("''", "'"));} - ; +BIG_INTEGER_LITERAL : INTEGER_NUMBER BIG_INTEGER_SUFFIX; + +BIG_DECIMAL_LITERAL : FLOATING_POINT_NUMBER BIG_DECIMAL_SUFFIX; + +HEX_LITERAL : '0' [xX] HEX_DIGIT+ LONG_SUFFIX?; + +fragment SINGLE_QUOTE : '\''; +fragment DOUBLE_QUOTE : '"'; + +STRING_LITERAL : SINGLE_QUOTE ( SINGLE_QUOTE SINGLE_QUOTE | ~('\'') )* SINGLE_QUOTE; + +JAVA_STRING_LITERAL + : DOUBLE_QUOTE ( ESCAPE_SEQUENCE | ~('"') )* DOUBLE_QUOTE + | [jJ] SINGLE_QUOTE ( ESCAPE_SEQUENCE | ~('\'') )* SINGLE_QUOTE + | [jJ] DOUBLE_QUOTE ( ESCAPE_SEQUENCE | ~('\'') )* DOUBLE_QUOTE + ; + +fragment BACKSLASH : '\\'; fragment ESCAPE_SEQUENCE - : '\\' ('b'|'t'|'n'|'f'|'r'|'\\"'|'\''|'\\') - | UNICODE_ESCAPE - | OCTAL_ESCAPE + : BACKSLASH [btnfr"'] + | BACKSLASH UNICODE_ESCAPE + | BACKSLASH BACKSLASH ; fragment -OCTAL_ESCAPE - : '\\' ('0'..'3') ('0'..'7') ('0'..'7') - | '\\' ('0'..'7') ('0'..'7') - | '\\' ('0'..'7') +UNICODE_ESCAPE + : 'u' HEX_DIGIT HEX_DIGIT HEX_DIGIT HEX_DIGIT ; -fragment -UNICODE_ESCAPE - : '\\' 'u' HEX_DIGIT HEX_DIGIT HEX_DIGIT HEX_DIGIT +BINARY_LITERAL + : [xX] SINGLE_QUOTE (HEX_DIGIT HEX_DIGIT)* SINGLE_QUOTE + | [xX] DOUBLE_QUOTE (HEX_DIGIT HEX_DIGIT)* DOUBLE_QUOTE ; // ESCAPE start tokens @@ 
-100,7 +125,7 @@ PLUS : '+'; MINUS : '-'; ASTERISK : '*'; SLASH : '/'; -PERCENT : '%'; +PERCENT_OP : '%'; AMPERSAND : '&'; SEMICOLON : ';'; COLON : ':'; @@ -109,123 +134,197 @@ DOUBLE_PIPE : '||'; QUESTION_MARK : '?'; ARROW : '->'; + +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Keywords -ABS : [aA] [bB] [sS]; -AS : [aA] [sS]; + +ID : [iI][dD]; +VERSION : [vV] [eE] [rR] [sS] [iI] [oO] [nN]; +VERSIONED : [vV] [eE] [rR] [sS] [iI] [oO] [nN] [eE] [dD]; +NATURALID : [nN] [aA] [tT] [uU] [rR] [aA] [lL] [iI] [dD]; +FK : [fF] [kK]; + ALL : [aA] [lL] [lL]; AND : [aA] [nN] [dD]; ANY : [aA] [nN] [yY]; +AS : [aA] [sS]; ASC : [aA] [sS] [cC]; AVG : [aA] [vV] [gG]; +BETWEEN : [bB] [eE] [tT] [wW] [eE] [eE] [nN]; +BOTH : [bB] [oO] [tT] [hH]; +BREADTH : [bB] [rR] [eE] [aA] [dD] [tT] [hH]; BY : [bB] [yY]; -BETWEEN : [bB] [eE] [tT] [wW] [eE] [eE] [nN]; -BIT_LENGTH : [bB] [iI] [tT] [_] [lL] [eE] [nN] [gG] [tT] [hH]; -BOTH : [bB] [oO] [tT] [hH]; -CASE : [cC] [aA] [sS] [eE]; -CAST : [cC] [aA] [sS] [tT]; -CHARACTER_LENGTH : [cC] [hH] [aA] [rR] [aA] [cC] [tT] [eE] [rR] '_' [lL] [eE] [nN] [gG] [tT] [hH]; -CLASS : [cC] [lL] [aA] [sS] [sS]; -COALESCE : [cC] [oO] [aA] [lL] [eE] [sS] [cC] [eE]; +CASE : [cC] [aA] [sS] [eE]; +CAST : [cC] [aA] [sS] [tT]; COLLATE : [cC] [oO] [lL] [lL] [aA] [tT] [eE]; -CONCAT : [cC] [oO] [nN] [cC] [aA] [tT]; COUNT : [cC] [oO] [uU] [nN] [tT]; +CROSS : [cC] [rR] [oO] [sS] [sS]; +CUBE : [cC] [uU] [bB] [eE]; +CURRENT : [cC] [uU] [rR] [rR] [eE] [nN] [tT]; CURRENT_DATE : [cC] [uU] [rR] [rR] [eE] [nN] [tT] '_' [dD] [aA] [tT] [eE]; +CURRENT_INSTANT : [cC] [uU] [rR] [rR] [eE] [nN] [tT] '_' [iI] [nN] [sS] [tT] [aA] [nN] [tT]; //deprecated legacy CURRENT_TIME : [cC] [uU] [rR] [rR] [eE] [nN] [tT] '_' [tT] [iI] [mM] [eE]; CURRENT_TIMESTAMP : [cC] [uU] [rR] [rR] [eE] [nN] [tT] '_' [tT] [iI] [mM] [eE] [sS] [tT] [aA] [mM] [pP]; -CROSS : [cC] [rR] [oO] [sS] [sS]; +CYCLE : [cC] [yY] [cC] [lL] [eE]; +DATE : [dD] [aA] [tT] [eE]; +DATETIME : [dD] 
[aA] [tT] [eE] [tT] [iI] [mM] [eE]; DAY : [dD] [aA] [yY]; +DEFAULT : [dD] [eE] [fF] [aA] [uU] [lL] [tT]; DELETE : [dD] [eE] [lL] [eE] [tT] [eE]; +DEPTH : [dD] [eE] [pP] [tT] [hH]; DESC : [dD] [eE] [sS] [cC]; DISTINCT : [dD] [iI] [sS] [tT] [iI] [nN] [cC] [tT]; +ELEMENT : [eE] [lL] [eE] [mM] [eE] [nN] [tT]; ELEMENTS : [eE] [lL] [eE] [mM] [eE] [nN] [tT] [sS]; ELSE : [eE] [lL] [sS] [eE]; EMPTY : [eE] [mM] [pP] [tT] [yY]; END : [eE] [nN] [dD]; ENTRY : [eE] [nN] [tT] [rR] [yY]; +EPOCH : [eE] [pP] [oO] [cC] [hH]; +ERROR : [eE] [rR] [rR] [oO] [rR]; ESCAPE : [eE] [sS] [cC] [aA] [pP] [eE]; +EVERY : [eE] [vV] [eE] [rR] [yY]; +EXCEPT : [eE] [xX] [cC] [eE] [pP] [tT]; +EXCLUDE : [eE] [xX] [cC] [lL] [uU] [dD] [eE]; EXISTS : [eE] [xX] [iI] [sS] [tT] [sS]; EXTRACT : [eE] [xX] [tT] [rR] [aA] [cC] [tT]; FETCH : [fF] [eE] [tT] [cC] [hH]; +FILTER : [fF] [iI] [lL] [tT] [eE] [rR]; +FIRST : [fF] [iI] [rR] [sS] [tT]; +FOLLOWING : [fF] [oO] [lL] [lL] [oO] [wW] [iI] [nN] [gG]; +FOR : [fF] [oO] [rR]; +FORMAT : [fF] [oO] [rR] [mM] [aA] [tT]; FROM : [fF] [rR] [oO] [mM]; FULL : [fF] [uU] [lL] [lL]; FUNCTION : [fF] [uU] [nN] [cC] [tT] [iI] [oO] [nN]; GROUP : [gG] [rR] [oO] [uU] [pP]; +GROUPS : [gG] [rR] [oO] [uU] [pP] [sS]; HAVING : [hH] [aA] [vV] [iI] [nN] [gG]; HOUR : [hH] [oO] [uU] [rR]; +IGNORE : [iI] [gG] [nN] [oO] [rR] [eE]; +ILIKE : [iI] [lL] [iI] [kK] [eE]; IN : [iI] [nN]; INDEX : [iI] [nN] [dD] [eE] [xX]; +INDICES : [iI] [nN] [dD] [iI] [cC] [eE] [sS]; INNER : [iI] [nN] [nN] [eE] [rR]; INSERT : [iI] [nN] [sS] [eE] [rR] [tT]; +INSTANT : [iI] [nN] [sS] [tT] [aA] [nN] [tT]; +INTERSECT : [iI] [nN] [tT] [eE] [rR] [sS] [eE] [cC] [tT]; INTO : [iI] [nN] [tT] [oO]; IS : [iI] [sS]; JOIN : [jJ] [oO] [iI] [nN]; KEY : [kK] [eE] [yY]; +KEYS : [kK] [eE] [yY] [sS]; +LAST : [lL] [aA] [sS] [tT]; +LATERAL : [lL] [aA] [tT] [eE] [rR] [aA] [lL]; LEADING : [lL] [eE] [aA] [dD] [iI] [nN] [gG]; LEFT : [lL] [eE] [fF] [tT]; -LENGTH : [lL] [eE] [nN] [gG] [tT] [hH]; -LIMIT : [lL] [iI] [mM] [iI] [tT]; LIKE : [lL] [iI] 
[kK] [eE]; +LIMIT : [lL] [iI] [mM] [iI] [tT]; LIST : [lL] [iI] [sS] [tT]; -LOCATE : [lL] [oO] [cC] [aA] [tT] [eE]; -LOWER : [lL] [oO] [wW] [eE] [rR]; +LISTAGG : [lL] [iI] [sS] [tT] [aA] [gG] [gG]; +LOCAL : [lL] [oO] [cC] [aA] [lL]; +LOCAL_DATE : [lL] [oO] [cC] [aA] [lL] '_' [dD] [aA] [tT] [eE]; +LOCAL_DATETIME : [lL] [oO] [cC] [aA] [lL] '_' [dD] [aA] [tT] [eE] [tT] [iI] [mM] [eE]; +LOCAL_TIME : [lL] [oO] [cC] [aA] [lL] '_' [tT] [iI] [mM] [eE]; MAP : [mM] [aA] [pP]; +MATERIALIZED : [mM] [aA] [tT] [eE] [rR] [iI] [aA] [lL] [iI] [zZ] [eE] [dD]; MAX : [mM] [aA] [xX]; MAXELEMENT : [mM] [aA] [xX] [eE] [lL] [eE] [mM] [eE] [nN] [tT]; MAXINDEX : [mM] [aA] [xX] [iI] [nN] [dD] [eE] [xX]; MEMBER : [mM] [eE] [mM] [bB] [eE] [rR]; +MICROSECOND : [mM] [iI] [cC] [rR] [oO] [sS] [eE] [cC] [oO] [nN] [dD]; +MILLISECOND : [mM] [iI] [lL] [lL] [iI] [sS] [eE] [cC] [oO] [nN] [dD]; MIN : [mM] [iI] [nN]; MINELEMENT : [mM] [iI] [nN] [eE] [lL] [eE] [mM] [eE] [nN] [tT]; MININDEX : [mM] [iI] [nN] [iI] [nN] [dD] [eE] [xX]; MINUTE : [mM] [iI] [nN] [uU] [tT] [eE]; -MOD : [mM] [oO] [dD]; MONTH : [mM] [oO] [nN] [tT] [hH]; +NANOSECOND : [nN] [aA] [nN] [oO] [sS] [eE] [cC] [oO] [nN] [dD]; NEW : [nN] [eE] [wW]; +NEXT : [nN] [eE] [xX] [tT]; +NO : [nN] [oO]; NOT : [nN] [oO] [tT]; -NULLIF : [nN] [uU] [lL] [lL] [iI] [fF]; +NULLS : [nN] [uU] [lL] [lL] [sS]; OBJECT : [oO] [bB] [jJ] [eE] [cC] [tT]; -OCTET_LENGTH : [oO] [cC] [tT] [eE] [tT] '_' [lL] [eE] [nN] [gG] [tT] [hH]; OF : [oO] [fF]; OFFSET : [oO] [fF] [fF] [sS] [eE] [tT]; +OFFSET_DATETIME : [oO] [fF] [fF] [sS] [eE] [tT] '_' [dD] [aA] [tT] [eE] [tT] [iI] [mM] [eE]; ON : [oO] [nN]; +ONLY : [oO] [nN] [lL] [yY]; OR : [oO] [rR]; ORDER : [oO] [rR] [dD] [eE] [rR]; +OTHERS : [oO] [tT] [hH] [eE] [rR] [sS]; OUTER : [oO] [uU] [tT] [eE] [rR]; +OVER : [oO] [vV] [eE] [rR]; +OVERFLOW : [oO] [vV] [eE] [rR] [fF] [lL] [oO] [wW]; +OVERLAY : [oO] [vV] [eE] [rR] [lL] [aA] [yY]; +PAD : [pP] [aA] [dD]; +PARTITION : [pP] [aA] [rR] [tT] [iI] [tT] [iI] [oO] [nN]; +PERCENT : [pP] 
[eE] [rR] [cC] [eE] [nN] [tT]; +PLACING : [pP] [lL] [aA] [cC] [iI] [nN] [gG]; POSITION : [pP] [oO] [sS] [iI] [tT] [iI] [oO] [nN]; +PRECEDING : [pP] [rR] [eE] [cC] [eE] [dD] [iI] [nN] [gG]; +QUARTER : [qQ] [uU] [aA] [rR] [tT] [eE] [rR]; +RANGE : [rR] [aA] [nN] [gG] [eE]; +RESPECT : [rR] [eE] [sS] [pP] [eE] [cC] [tT]; RIGHT : [rR] [iI] [gG] [hH] [tT]; +ROLLUP : [rR] [oO] [lL] [lL] [uU] [pP]; +ROW : [rR] [oO] [wW]; +ROWS : [rR] [oO] [wW] [sS]; +SEARCH : [sS] [eE] [aA] [rR] [cC] [hH]; SECOND : [sS] [eE] [cC] [oO] [nN] [dD]; SELECT : [sS] [eE] [lL] [eE] [cC] [tT]; SET : [sS] [eE] [tT]; SIZE : [sS] [iI] [zZ] [eE]; -SQRT : [sS] [qQ] [rR] [tT]; -STR : [sS] [tT] [rR]; +SOME : [sS] [oO] [mM] [eE]; SUBSTRING : [sS] [uU] [bB] [sS] [tT] [rR] [iI] [nN] [gG]; -SUBSTR : [sS] [uU] [bB] [sS] [tT] [rR]; -SUM : [sS] [uU] [mM]; +SUM : [sS] [uU] [mM]; THEN : [tT] [hH] [eE] [nN]; +TIES : [tT] [iI] [eE] [sS]; +TIME : [tT] [iI] [mM] [eE]; +TIMESTAMP : [tT] [iI] [mM] [eE] [sS] [tT] [aA] [mM] [pP]; TIMEZONE_HOUR : [tT] [iI] [mM] [eE] [zZ] [oO] [nN] [eE] '_' [hH] [oO] [uU] [rR]; TIMEZONE_MINUTE : [tT] [iI] [mM] [eE] [zZ] [oO] [nN] [eE] '_' [mM] [iI] [nN] [uU] [tT] [eE]; +TO : [tT] [oO]; TRAILING : [tT] [rR] [aA] [iI] [lL] [iI] [nN] [gG]; TREAT : [tT] [rR] [eE] [aA] [tT]; TRIM : [tT] [rR] [iI] [mM]; +TRUNC : [tT] [rR] [uU] [nN] [cC]; +TRUNCATE : [tT] [rR] [uU] [nN] [cC] [aA] [tT] [eE]; TYPE : [tT] [yY] [pP] [eE]; +UNBOUNDED : [uU] [nN] [bB] [oO] [uU] [nN] [dD] [eE] [dD]; +UNION : [uU] [nN] [iI] [oO] [nN]; UPDATE : [uU] [pP] [dD] [aA] [tT] [eE]; -UPPER : [uU] [pP] [pP] [eE] [rR]; +USING : [uU] [sS] [iI] [nN] [gG]; VALUE : [vV] [aA] [lL] [uU] [eE]; +VALUES : [vV] [aA] [lL] [uU] [eE] [sS]; +WEEK : [wW] [eE] [eE] [kK]; WHEN : [wW] [hH] [eE] [nN]; WHERE : [wW] [hH] [eE] [rR] [eE]; WITH : [wW] [iI] [tT] [hH]; +WITHIN : [wW] [iI] [tT] [hH] [iI] [nN]; +WITHOUT : [wW] [iI] [tT] [hH] [oO] [uU] [tT]; YEAR : [yY] [eE] [aA] [rR]; +ZONED : [zZ] [oO] [nN] [eE] [dD]; // case-insensitive true, false and null 
recognition (split vote :) TRUE : [tT] [rR] [uU] [eE]; FALSE : [fF] [aA] [lL] [sS] [eE]; NULL : [nN] [uU] [lL] [lL]; + +fragment +LETTER : [a-zA-Z\u0080-\ufffe_$]; + // Identifiers IDENTIFIER - : ('a'..'z'|'A'..'Z'|'_'|'$'|'\u0080'..'\ufffe')('a'..'z'|'A'..'Z'|'_'|'$'|'0'..'9'|'\u0080'..'\ufffe')* + : LETTER (LETTER | DIGIT)* ; +fragment +BACKTICK : '`'; + QUOTED_IDENTIFIER - : '`' ( ESCAPE_SEQUENCE | ~('\\'|'`') )* '`' + : BACKTICK ( ESCAPE_SEQUENCE | '\\' BACKTICK | ~([`]) )* BACKTICK ; diff --git a/extensions/panache/panacheql/src/main/antlr4/io/quarkus/panacheql/internal/HqlParser.g4 b/extensions/panache/panacheql/src/main/antlr4/io/quarkus/panacheql/internal/HqlParser.g4 index 25810b07f25ce2..f666d873b7ff51 100644 --- a/extensions/panache/panacheql/src/main/antlr4/io/quarkus/panacheql/internal/HqlParser.g4 +++ b/extensions/panache/panacheql/src/main/antlr4/io/quarkus/panacheql/internal/HqlParser.g4 @@ -22,107 +22,249 @@ options { // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Statements +/** + * Toplevel rule, entrypoint to the whole grammar + */ statement - : ( selectStatement | updateStatement | deleteStatement | insertStatement ) EOF + : (selectStatement | updateStatement | deleteStatement | insertStatement) EOF ; +/** + * A 'select' query + */ selectStatement - : querySpec + : queryExpression + ; + +/** + * A 'select' query that occurs within another statement + */ +subquery + : queryExpression ; +/** + * A declaration of a root entity, with an optional identification variable + */ +targetEntity + : entityName variable? + ; + +/** + * A 'delete' statement + */ deleteStatement - : DELETE FROM? entityName identificationVariableDef? whereClause? + : DELETE FROM? targetEntity whereClause? ; +/** + * An 'update' statement + */ updateStatement - : UPDATE FROM? entityName identificationVariableDef? setClause whereClause? + : UPDATE VERSIONED? targetEntity setClause whereClause? 
; +/** + * A 'set' list of assignments in an 'update' statement + */ setClause - : SET assignment+ + : SET assignment (COMMA assignment)* ; +/** + * An assignment to an entity attribute in an 'update' statement + */ assignment - : dotIdentifierSequence EQUAL expression + : simplePath EQUAL expressionOrPredicate ; +/** + * An 'insert' statement + */ insertStatement -// todo (6.0 : VERSIONED - : INSERT insertSpec querySpec + : INSERT INTO? targetEntity targetFields (queryExpression | valuesList) ; -insertSpec - : intoSpec targetFieldsSpec +/** + * The list of target entity attributes in an 'insert' statement + */ +targetFields + : LEFT_PAREN simplePath (COMMA simplePath)* RIGHT_PAREN ; -intoSpec - : INTO entityName +/** + * A 'values' clause in an 'insert' statement, with one or more tuples of values to insert + */ +valuesList + : VALUES values (COMMA values)* ; -targetFieldsSpec - : - LEFT_PAREN dotIdentifierSequence (COMMA dotIdentifierSequence)* RIGHT_PAREN +/** + * A tuple of values to insert in an 'insert' statement + */ +values + : LEFT_PAREN expressionOrPredicate (COMMA expressionOrPredicate)* RIGHT_PAREN ; // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // QUERY SPEC - general structure of root sqm or sub sqm -querySpec - : selectClause? fromClause whereClause? ( groupByClause havingClause? )? orderByClause? limitClause? offsetClause? +withClause + : WITH cte (COMMA cte)* + ; + +cte + : identifier AS (NOT? MATERIALIZED)? LEFT_PAREN queryExpression RIGHT_PAREN searchClause? cycleClause? + ; + +cteAttributes + : identifier (COMMA identifier)* + ; + +searchClause + : SEARCH (BREADTH|DEPTH) FIRST BY searchSpecifications SET identifier + ; + +searchSpecifications + : searchSpecification (COMMA searchSpecification)* + ; + +searchSpecification + : identifier sortDirection? nullsPrecedence? + ; + +cycleClause + : CYCLE cteAttributes SET identifier (TO literal DEFAULT literal)? (USING identifier)? 
+ ; + +/** + * A toplevel query or subquery, which may be a union or intersection of subqueries + */ +queryExpression + : withClause? orderedQuery # SimpleQueryGroup + | withClause? orderedQuery (setOperator orderedQuery)+ # SetQueryGroup + ; + +/** + * A query with an optional 'order by' clause + */ +orderedQuery + : query queryOrder? # QuerySpecExpression + | LEFT_PAREN queryExpression RIGHT_PAREN queryOrder? # NestedQueryExpression + | queryOrder # QueryOrderExpression + ; + +/** + * An operator whose operands are whole queries + */ +setOperator + : UNION ALL? + | INTERSECT ALL? + | EXCEPT ALL? + ; + +/** + * The 'order by' clause and optional subclauses for limiting and pagination + */ +queryOrder + : orderByClause limitClause? offsetClause? fetchClause? + ; + +/** + * An unordered query, with just projection, restriction, and aggregation + * + * - The 'select' clause may come first, in which case 'from' is optional + * - The 'from' clause may come first, in which case 'select' is optional, and comes last + */ +query +// TODO: add with clause + : selectClause fromClause? whereClause? (groupByClause havingClause?)? + | fromClause whereClause? (groupByClause havingClause?)? selectClause? + | whereClause ; // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // FROM clause +/** + * The 'from' clause of a query + */ fromClause - : FROM fromClauseSpace (COMMA fromClauseSpace)* + : FROM entityWithJoins (COMMA entityWithJoins)* ; -fromClauseSpace - : pathRoot ( crossJoin | jpaCollectionJoin | qualifiedJoin )* +/** + * The declaration of a root entity in 'from' clause, along with its joins + */ +entityWithJoins + : fromRoot (join | crossJoin | jpaCollectionJoin)* ; -pathRoot - : entityName (identificationVariableDef)? +/** + * A root entity declaration in the 'from' clause, with optional identification variable + */ +fromRoot + : entityName variable? # RootEntity + | LEFT_PAREN subquery RIGHT_PAREN variable? 
# RootSubquery ; /** - * Rule for dotIdentifierSequence where we expect an entity-name. The extra - * "rule layer" allows the walker to specially handle such a case (to use a special - * org.hibernate.query.hql.DotIdentifierConsumer, etc) + * An entity name, for identifying the root entity */ entityName - : dotIdentifierSequence + : identifier (DOT identifier)* ; -identificationVariableDef - : (AS identifier) - | IDENTIFIER +/** + * An identification variable (an entity alias) + */ +variable + : AS identifier + | nakedIdentifier ; +/** + * A 'cross join' to a second root entity (a cartesian product) + */ crossJoin - : CROSS JOIN pathRoot (identificationVariableDef)? + : CROSS JOIN entityName variable? ; +/** + * Deprecated syntax dating back to EJB-QL prior to EJB 3, required by JPA, never documented in Hibernate + */ jpaCollectionJoin - : COMMA IN LEFT_PAREN path RIGHT_PAREN (identificationVariableDef)? + : COMMA IN LEFT_PAREN path RIGHT_PAREN variable? ; -qualifiedJoin - : joinTypeQualifier JOIN FETCH? qualifiedJoinRhs (qualifiedJoinPredicate)? +/** + * A 'join', with an optional 'on' or 'with' clause + */ +join + : joinType JOIN FETCH? joinTarget joinRestriction? ; -joinTypeQualifier +/** + * The inner or outer join type + */ +joinType : INNER? | (LEFT|RIGHT|FULL)? OUTER? ; -qualifiedJoinRhs - : path (identificationVariableDef)? +/** + * The joined path, with an optional identification variable + */ +joinTarget + : path variable? #JoinPath + | LATERAL? LEFT_PAREN subquery RIGHT_PAREN variable? #JoinSubquery ; -qualifiedJoinPredicate +/** + * An extra restriction added to the join condition + */ +joinRestriction : (ON | WITH) predicate ; @@ -131,472 +273,1064 @@ qualifiedJoinPredicate // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // SELECT clause +/** + * The 'select' clause of a query + */ selectClause - : SELECT DISTINCT? selectionList + : SELECT DISTINCT? 
selectionList ; +/** + * A projection list: a list of selected items + */ selectionList : selection (COMMA selection)* ; +/** + * An element of a projection list: a selected item, with an optional alias + */ selection - : selectExpression (resultIdentifier)? + : selectExpression variable? ; +/** + * A selected item occurring in the 'select' clause + */ selectExpression - : dynamicInstantiation - | jpaSelectObjectSyntax - | mapEntrySelection - | expression - ; - -resultIdentifier - : (AS identifier) - | IDENTIFIER + : instantiation + | mapEntrySelection + | jpaSelectObjectSyntax + | expressionOrPredicate ; +/** + * The special function entry() which may only occur in the 'select' clause + */ mapEntrySelection : ENTRY LEFT_PAREN path RIGHT_PAREN ; -dynamicInstantiation - : NEW dynamicInstantiationTarget LEFT_PAREN dynamicInstantiationArgs RIGHT_PAREN +/** + * Instantiation using 'select new' + */ +instantiation + : NEW instantiationTarget LEFT_PAREN instantiationArguments RIGHT_PAREN ; -dynamicInstantiationTarget +/** + * The type to be instantiated with 'select new', 'list', 'map', or a fully-qualified Java class name + */ +instantiationTarget : LIST | MAP - | dotIdentifierSequence + | simplePath ; -dynamicInstantiationArgs - : dynamicInstantiationArg ( COMMA dynamicInstantiationArg )* +/** + * The arguments to a 'select new' instantiation + */ +instantiationArguments + : instantiationArgument (COMMA instantiationArgument)* ; -dynamicInstantiationArg - : dynamicInstantiationArgExpression (AS? identifier)? +/** + * A single argument in a 'select new' instantiation, with an optional alias + */ +instantiationArgument + : instantiationArgumentExpression variable? 
; -dynamicInstantiationArgExpression - : expression - | dynamicInstantiation +/** + * A single argument in a 'select new' instantiation: an expression, or a nested instantiation + */ +instantiationArgumentExpression + : expressionOrPredicate + | instantiation ; +/** + * Deprecated syntax dating back to EJB-QL prior to EJB 3, required by JPA, never documented in Hibernate + */ jpaSelectObjectSyntax - : OBJECT LEFT_PAREN identifier RIGHT_PAREN + : OBJECT LEFT_PAREN identifier RIGHT_PAREN ; - // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Path structures -dotIdentifierSequence - : identifier dotIdentifierSequenceContinuation* +/** + * A simple path expression + * + * - a reference to an identification variable (not case-sensitive), + * - followed by a list of period-separated identifiers (case-sensitive) + */ +simplePath + : identifier simplePathElement* ; -dotIdentifierSequenceContinuation +/** + * An element of a simple path expression: a period, and an identifier (case-sensitive) + */ +simplePathElement : DOT identifier ; - /** + * A much more complicated path expression involving operators and functions + * * A path which needs to be resolved semantically. This recognizes * any path-like structure. Generally, the path is semantically * interpreted by the consumer of the parse-tree. However, there * are certain cases where we can syntactically recognize a navigable - * path; see `syntacticNavigablePath` rule + * path; see 'syntacticNavigablePath' rule */ path - : syntacticDomainPath (pathContinuation)? + : syntacticDomainPath pathContinuation? | generalPathFragment ; +/** + * A continuation of a path expression "broken" by an operator or function + */ pathContinuation - : DOT dotIdentifierSequence (DOT pathContinuation)? 
+ : DOT simplePath ; /** + * An operator or function that may occur within a path expression + * * Rule for cases where we syntactically know that the path is a * "domain path" because it is one of these special cases: * * * TREAT( path ) * * ELEMENTS( path ) + * * INDICES( path ) * * VALUE( path ) * * KEY( path ) * * path[ selector ] */ syntacticDomainPath : treatedNavigablePath - | collectionElementNavigablePath + | collectionValueNavigablePath | mapKeyNavigablePath - | dotIdentifierSequence indexedPathAccessFragment + | simplePath indexedPathAccessFragment ; /** - * The main path rule. Recognition for all normal path structures including + * The main path rule + * + * Recognition for all normal path structures including * class, field and enum references as well as navigable paths. * * NOTE : this rule does *not* cover the special syntactic navigable path * cases: TREAT, KEY, ELEMENTS, VALUES */ generalPathFragment - : dotIdentifierSequence (indexedPathAccessFragment)? + : simplePath indexedPathAccessFragment? ; +/** + * In index operator that "breaks" a path expression + */ indexedPathAccessFragment : LEFT_BRACKET expression RIGHT_BRACKET (DOT generalPathFragment)? ; +/** + * A 'treat()' function that "breaks" a path expression + */ treatedNavigablePath - : TREAT LEFT_PAREN path AS dotIdentifierSequence RIGHT_PAREN (pathContinuation)? + : TREAT LEFT_PAREN path AS simplePath RIGHT_PAREN pathContinuation? ; -collectionElementNavigablePath - : (VALUE | ELEMENTS) LEFT_PAREN path RIGHT_PAREN (pathContinuation)? +/** + * A 'value()' function that "breaks" a path expression + */ +collectionValueNavigablePath + : elementValueQuantifier LEFT_PAREN path RIGHT_PAREN pathContinuation? ; +/** + * A 'key()' or 'index()' function that "breaks" a path expression + */ mapKeyNavigablePath - : KEY LEFT_PAREN path RIGHT_PAREN (pathContinuation)? + : indexKeyQuantifier LEFT_PAREN path RIGHT_PAREN pathContinuation? 
; // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // GROUP BY clause +/** + * The 'group by' clause of a query, controls aggregation + */ groupByClause - : GROUP BY groupingSpecification - ; - -groupingSpecification - : groupingValue ( COMMA groupingValue )* + : GROUP BY groupByExpression (COMMA groupByExpression)* ; -groupingValue - : expression collationSpecification? +/** + * A grouped item that occurs in the 'group by' clause + * + * a select item alias, an ordinal position of a select item, or an expression + */ +groupByExpression + : identifier + | INTEGER_LITERAL + | expression ; - // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //HAVING clause +/** + * The 'having' clause of a query, a restriction on the grouped data + */ havingClause - : HAVING predicate + : HAVING predicate ; // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // ORDER BY clause +/** + * The 'order by' clause of a query, controls sorting + */ orderByClause -// todo (6.0) : null precedence : ORDER BY sortSpecification (COMMA sortSpecification)* ; +/** + * Specialized rule for ordered Map and Set '@OrderBy' handling + */ +orderByFragment + : sortSpecification (COMMA sortSpecification)* + ; + +/** + * A rule for sorting an item in the 'order by' clause + */ sortSpecification - : expression collationSpecification? orderingSpecification? + : sortExpression sortDirection? nullsPrecedence? 
; -collationSpecification - : COLLATE collateName +/** + * A rule for sorting null values + */ +nullsPrecedence + : NULLS (FIRST | LAST) + ; + +/** + * A sorted item that occurs in the 'order by' clause + * + * a select item alias, an ordinal position of a select item, or an expression + */ +sortExpression + : identifier + | INTEGER_LITERAL + | expression ; -collateName - : dotIdentifierSequence +/** + * The direction in which to sort + */ +sortDirection + : ASC + | DESC + ; + +/** + * The special 'collate()' functions + */ +collateFunction + : COLLATE LEFT_PAREN expression AS collation RIGHT_PAREN ; -orderingSpecification - : ASC - | DESC +/** + * The name of a database-defined collation + * + * Certain databases allow a period in a collation name + */ +collation + : simplePath ; + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // LIMIT/OFFSET clause +/** + * A 'limit' on the number of query results + */ limitClause - : LIMIT parameterOrNumberLiteral + : LIMIT parameterOrIntegerLiteral ; +/** + * An 'offset' of the first query result to return + */ offsetClause - : OFFSET parameterOrNumberLiteral + : OFFSET parameterOrIntegerLiteral + (ROW | ROWS)? 
// no semantics + ; + +/** + * A much more complex syntax for limits + */ +fetchClause + : FETCH + (FIRST | NEXT) // no semantics + fetchCountOrPercent + (ROW | ROWS) // no semantics + (ONLY | WITH TIES) + ; + +fetchCountOrPercent + : parameterOrIntegerLiteral + | parameterOrNumberLiteral PERCENT + ; + +/** + * A parameterizable integer literal + */ +parameterOrIntegerLiteral + : parameter + | INTEGER_LITERAL ; +/** + * A parameterizable numeric literal + */ parameterOrNumberLiteral : parameter | INTEGER_LITERAL + | LONG_LITERAL + | FLOAT_LITERAL + | DOUBLE_LITERAL ; // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // WHERE clause & Predicates +/** + * The 'where' clause of a query, update statement, or delete statement + */ whereClause - : WHERE predicate + : WHERE predicate ; +/** + * A boolean-valued expression, usually used to express a restriction + */ predicate - : LEFT_PAREN predicate RIGHT_PAREN # GroupedPredicate - | predicate OR predicate # OrPredicate - | predicate AND predicate # AndPredicate - | NOT predicate # NegatedPredicate - | expression IS (NOT)? NULL # IsNullPredicate - | expression IS (NOT)? EMPTY # IsEmptyPredicate - | expression EQUAL expression # EqualityPredicate - | expression NOT_EQUAL expression # InequalityPredicate - | expression GREATER expression # GreaterThanPredicate - | expression GREATER_EQUAL expression # GreaterThanOrEqualPredicate - | expression LESS expression # LessThanPredicate - | expression LESS_EQUAL expression # LessThanOrEqualPredicate - | expression (NOT)? IN inList # InPredicate - | expression (NOT)? BETWEEN expression AND expression # BetweenPredicate - | expression (NOT)? LIKE expression (likeEscape)? # LikePredicate - | MEMBER OF path # MemberOfPredicate + //highest to lowest precedence + : LEFT_PAREN predicate RIGHT_PAREN # GroupedPredicate + | expression IS NOT? NULL # IsNullPredicate + | expression IS NOT? EMPTY # IsEmptyPredicate + | expression IS NOT? 
TRUE # IsTruePredicate + | expression IS NOT? FALSE # IsFalsePredicate + | expression IS NOT? DISTINCT FROM expression # IsDistinctFromPredicate + | expression NOT? MEMBER OF? path # MemberOfPredicate + | expression NOT? IN inList # InPredicate + | expression NOT? BETWEEN expression AND expression # BetweenPredicate + | expression NOT? (LIKE | ILIKE) expression likeEscape? # LikePredicate + | expression comparisonOperator expression # ComparisonPredicate + | EXISTS collectionQuantifier LEFT_PAREN simplePath RIGHT_PAREN # ExistsCollectionPartPredicate + | EXISTS expression # ExistsPredicate + | NOT predicate # NegatedPredicate + | predicate AND predicate # AndPredicate + | predicate OR predicate # OrPredicate + | expression # BooleanExpressionPredicate + ; + +/** + * An operator which compares values for equality or order + */ +comparisonOperator + : EQUAL + | NOT_EQUAL + | GREATER + | GREATER_EQUAL + | LESS + | LESS_EQUAL ; +/** + * Any right operand of the 'in' operator + * + * A list of values, a parameter (for a parameterized list of values), a subquery, or an 'elements()' or 'indices()' function + */ inList - : ELEMENTS? LEFT_PAREN dotIdentifierSequence RIGHT_PAREN # PersistentCollectionReferenceInList - | LEFT_PAREN expression (COMMA expression)* RIGHT_PAREN # ExplicitTupleInList - | expression # SubQueryInList + : collectionQuantifier LEFT_PAREN simplePath RIGHT_PAREN # PersistentCollectionReferenceInList + | LEFT_PAREN (expressionOrPredicate (COMMA expressionOrPredicate)*)? 
RIGHT_PAREN# ExplicitTupleInList + | LEFT_PAREN subquery RIGHT_PAREN # SubqueryInList + | parameter # ParamInList ; +/** + * A single character used to escape the '_' and '%' wildcards in a 'like' pattern + */ likeEscape - : ESCAPE expression + : ESCAPE (STRING_LITERAL | JAVA_STRING_LITERAL | parameter) ; // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Expression +/** + * An expression, excluding boolean expressions + */ expression - : expression DOUBLE_PIPE expression # ConcatenationExpression - | expression PLUS expression # AdditionExpression - | expression MINUS expression # SubtractionExpression - | expression ASTERISK expression # MultiplicationExpression - | expression SLASH expression # DivisionExpression - | expression PERCENT expression # ModuloExpression - // todo (6.0) : should these unary plus/minus rules only apply to literals? - // if so, move the MINUS / PLUS recognition to the `literal` rule - // specificcally for numeric literals - | MINUS expression # UnaryMinusExpression - | PLUS expression # UnaryPlusExpression - | caseStatement # CaseExpression - | coalesce # CoalesceExpression - | nullIf # NullIfExpression - | literal # LiteralExpression - | parameter # ParameterExpression - | entityTypeReference # EntityTypeExpression - | path # PathExpression - | function # FunctionExpression - | LEFT_PAREN querySpec RIGHT_PAREN # SubQueryExpression + //highest to lowest precedence + : LEFT_PAREN expression RIGHT_PAREN # GroupedExpression + | LEFT_PAREN expressionOrPredicate (COMMA expressionOrPredicate)+ RIGHT_PAREN # TupleExpression + | LEFT_PAREN subquery RIGHT_PAREN # SubqueryExpression + | primaryExpression # BarePrimaryExpression + | signOperator numericLiteral # UnaryNumericLiteralExpression + | signOperator expression # UnaryExpression + | expression datetimeField # ToDurationExpression + | expression BY datetimeField # FromDurationExpression + | expression multiplicativeOperator expression # 
MultiplicationExpression + | expression additiveOperator expression # AdditionExpression + | expression DOUBLE_PIPE expression # ConcatenationExpression ; -entityTypeReference - : TYPE LEFT_PAREN (path | parameter) RIGHT_PAREN +/** + * An expression not involving operators + */ +primaryExpression + : caseList # CaseExpression + | literal # LiteralExpression + | parameter # ParameterExpression + | entityTypeReference # EntityTypeExpression + | entityIdReference # EntityIdExpression + | entityVersionReference # EntityVersionExpression + | entityNaturalIdReference # EntityNaturalIdExpression + | toOneFkReference # ToOneFkExpression + | syntacticDomainPath pathContinuation? # SyntacticPathExpression + | function # FunctionExpression + | generalPathFragment # GeneralPathExpression ; -caseStatement - : simpleCaseStatement - | searchedCaseStatement +/** + * Any expression, including boolean expressions + */ +expressionOrPredicate + : expression + | predicate ; -simpleCaseStatement - : CASE expression (simpleCaseWhen)+ (caseOtherwise)? END +collectionQuantifier + : elementsValuesQuantifier + | indicesKeysQuantifier ; -simpleCaseWhen - : WHEN expression THEN expression +elementValueQuantifier + : ELEMENT + | VALUE ; -caseOtherwise - : ELSE expression +indexKeyQuantifier + : INDEX + | KEY ; -searchedCaseStatement - : CASE (searchedCaseWhen)+ (caseOtherwise)? 
END +elementsValuesQuantifier + : ELEMENTS + | VALUES ; -searchedCaseWhen - : WHEN predicate THEN expression +indicesKeysQuantifier + : INDICES + | KEYS ; -coalesce - : COALESCE LEFT_PAREN expression (COMMA expression)+ RIGHT_PAREN +/** + * A binary operator with the same precedence as * + */ +multiplicativeOperator + : SLASH + | PERCENT_OP + | ASTERISK ; -nullIf - : NULLIF LEFT_PAREN expression COMMA expression RIGHT_PAREN +/** + * A binary operator with the same precedence as + + */ +additiveOperator + : PLUS + | MINUS ; -literal - : STRING_LITERAL - | CHARACTER_LITERAL - | INTEGER_LITERAL - | LONG_LITERAL - | BIG_INTEGER_LITERAL - | FLOAT_LITERAL - | DOUBLE_LITERAL - | BIG_DECIMAL_LITERAL - | HEX_LITERAL - | OCTAL_LITERAL - | NULL - | TRUE - | FALSE - | timestampLiteral - | dateLiteral - | timeLiteral +/** + * A unary prefix operator + */ +signOperator + : PLUS + | MINUS ; -// todo (6.0) : expand temporal literal support to Java 8 temporal types -// * Instant -> {instant '...'} -// * LocalDate -> {localDate '...'} -// * LocalDateTime -> {localDateTime '...'} -// * OffsetDateTime -> {offsetDateTime '...'} -// * OffsetTime -> {offsetTime '...'} -// * ZonedDateTime -> {localDate '...'} -// * ... -// -// Few things: -// 1) the markers above are just initial thoughts. They are obviously verbose. Maybe acronyms or shortened forms would be better -// 2) we may want to stay away from all of the timezone headaches by not supporting local, zoned and offset forms - -timestampLiteral - : TIMESTAMP_ESCAPE_START dateTimeLiteralText RIGHT_BRACE +/** + * The special function 'type()' + */ +entityTypeReference + : TYPE LEFT_PAREN (path | parameter) RIGHT_PAREN ; -dateLiteral - : DATE_ESCAPE_START dateTimeLiteralText RIGHT_BRACE +/** + * The special function 'id()' + */ +entityIdReference + : ID LEFT_PAREN path RIGHT_PAREN pathContinuation? 
; -timeLiteral - : TIME_ESCAPE_START dateTimeLiteralText RIGHT_BRACE +/** + * The special function 'version()' + */ +entityVersionReference + : VERSION LEFT_PAREN path RIGHT_PAREN ; -dateTimeLiteralText - : STRING_LITERAL | CHARACTER_LITERAL +/** + * The special function 'naturalid()' + */ +entityNaturalIdReference + : NATURALID LEFT_PAREN path RIGHT_PAREN pathContinuation? ; -parameter - : COLON identifier # NamedParameter - | QUESTION_MARK INTEGER_LITERAL? # PositionalParameter +/** + * The special function 'fk()' + */ +toOneFkReference + : FK LEFT_PAREN path RIGHT_PAREN ; -function - : standardFunction - | aggregateFunction - | jpaCollectionFunction - | hqlCollectionFunction - | jpaNonStandardFunction - | nonStandardFunction +/** + * A 'case' expression, which comes in two forms: "simple", and "searched" + */ +caseList + : simpleCaseList + | searchedCaseList ; -jpaNonStandardFunction - : FUNCTION LEFT_PAREN jpaNonStandardFunctionName (COMMA nonStandardFunctionArguments)? RIGHT_PAREN +/** + * A simple 'case' expression + */ +simpleCaseList + : CASE expressionOrPredicate simpleCaseWhen+ caseOtherwise? END ; -jpaNonStandardFunctionName - : STRING_LITERAL +/** + * The 'when' clause of a simple case + */ +simpleCaseWhen + : WHEN expression THEN expressionOrPredicate + ; + +/** + * The 'else' clause of a 'case' expression + */ +caseOtherwise + : ELSE expressionOrPredicate + ; + +/** + * A searched 'case' expression + */ +searchedCaseList + : CASE searchedCaseWhen+ caseOtherwise? 
END + ; + +/** + * The 'when' clause of a searched case + */ +searchedCaseWhen + : WHEN predicate THEN expressionOrPredicate + ; + +/** + * A literal value + */ +literal + : STRING_LITERAL + | JAVA_STRING_LITERAL + | NULL + | booleanLiteral + | numericLiteral + | binaryLiteral + | temporalLiteral + | generalizedLiteral + ; + +/** + * A boolean literal value + */ +booleanLiteral + : TRUE + | FALSE + ; + +/** + * A numeric literal value, including hexadecimal literals + */ +numericLiteral + : INTEGER_LITERAL + | LONG_LITERAL + | BIG_INTEGER_LITERAL + | FLOAT_LITERAL + | DOUBLE_LITERAL + | BIG_DECIMAL_LITERAL + | HEX_LITERAL + ; + +/** + * A binary literal value, as a SQL-style literal, or a braced list of byte literals + */ +binaryLiteral + : BINARY_LITERAL + | LEFT_BRACE HEX_LITERAL (COMMA HEX_LITERAL)* RIGHT_BRACE ; -nonStandardFunction - : nonStandardFunctionName LEFT_PAREN nonStandardFunctionArguments? RIGHT_PAREN +/** + * A literal date, time, or datetime, in HQL syntax, or as a JDBC-style "escape" syntax + */ +temporalLiteral + : dateTimeLiteral + | dateLiteral + | timeLiteral + | jdbcTimestampLiteral + | jdbcDateLiteral + | jdbcTimeLiteral ; -nonStandardFunctionName - : dotIdentifierSequence +/** + * A literal datetime, in braces, or with the 'datetime' keyword + */ +dateTimeLiteral + : localDateTimeLiteral + | zonedDateTimeLiteral + | offsetDateTimeLiteral ; -nonStandardFunctionArguments - : expression (COMMA expression)* +localDateTimeLiteral + : LEFT_BRACE localDateTime RIGHT_BRACE + | LOCAL? DATETIME localDateTime ; -jpaCollectionFunction - : SIZE LEFT_PAREN path RIGHT_PAREN # CollectionSizeFunction - | INDEX LEFT_PAREN identifier RIGHT_PAREN # CollectionIndexFunction +zonedDateTimeLiteral + : LEFT_BRACE zonedDateTime RIGHT_BRACE + | ZONED? 
DATETIME zonedDateTime ; -hqlCollectionFunction - : MAXINDEX LEFT_PAREN path RIGHT_PAREN # MaxIndexFunction - | MAXELEMENT LEFT_PAREN path RIGHT_PAREN # MaxElementFunction - | MININDEX LEFT_PAREN path RIGHT_PAREN # MinIndexFunction - | MINELEMENT LEFT_PAREN path RIGHT_PAREN # MinElementFunction +offsetDateTimeLiteral + : LEFT_BRACE offsetDateTime RIGHT_BRACE + | OFFSET? DATETIME offsetDateTimeWithMinutes + ; + +/** + * A literal date, in braces, or with the 'date' keyword + */ +dateLiteral + : LEFT_BRACE date RIGHT_BRACE + | LOCAL? DATE date + ; + +/** + * A literal time, in braces, or with the 'time' keyword + */ +timeLiteral + : LEFT_BRACE time RIGHT_BRACE + | LOCAL? TIME time + ; + +/** + * A literal datetime + */ + dateTime + : localDateTime + | zonedDateTime + | offsetDateTime + ; + +localDateTime + : date time + ; + +zonedDateTime + : date time zoneId + ; + +offsetDateTime + : date time offset + ; + +offsetDateTimeWithMinutes + : date time offsetWithMinutes + ; + +/** + * A literal date + */ +date + : year MINUS month MINUS day + ; + +/** + * A literal time + */ +time + : hour COLON minute (COLON second)? + ; + +/** + * A literal offset + */ +offset + : (PLUS | MINUS) hour (COLON minute)? ; +offsetWithMinutes + : (PLUS | MINUS) hour COLON minute + ; + +year: INTEGER_LITERAL; +month: INTEGER_LITERAL; +day: INTEGER_LITERAL; +hour: INTEGER_LITERAL; +minute: INTEGER_LITERAL; +second: INTEGER_LITERAL | DOUBLE_LITERAL; +zoneId + : IDENTIFIER (SLASH IDENTIFIER)? 
+ | STRING_LITERAL; + +/** + * A JDBC-style timestamp escape, as required by JPQL + */ +jdbcTimestampLiteral + : TIMESTAMP_ESCAPE_START (dateTime | genericTemporalLiteralText) RIGHT_BRACE + ; + +/** + * A JDBC-style date escape, as required by JPQL + */ +jdbcDateLiteral + : DATE_ESCAPE_START (date | genericTemporalLiteralText) RIGHT_BRACE + ; + +/** + * A JDBC-style time escape, as required by JPQL + */ +jdbcTimeLiteral + : TIME_ESCAPE_START (time | genericTemporalLiteralText) RIGHT_BRACE + ; + +genericTemporalLiteralText + : STRING_LITERAL + ; + +/** + * A generic format for specifying literal values of arbitary types + */ +generalizedLiteral + : LEFT_BRACE generalizedLiteralType COLON generalizedLiteralText RIGHT_BRACE + ; + +generalizedLiteralType : STRING_LITERAL; +generalizedLiteralText : STRING_LITERAL; + + +/** + * A query parameter: a named parameter, or an ordinal parameter + */ +parameter + : COLON identifier # NamedParameter + | QUESTION_MARK INTEGER_LITERAL? # PositionalParameter + ; + +/** + * A function invocation that may occur in an arbitrary expression + */ +function + : standardFunction + | aggregateFunction + | collectionSizeFunction + | collectionAggregateFunction + | collectionFunctionMisuse + | jpaNonstandardFunction + | genericFunction + ; + +/** + * A syntax for calling user-defined or native database functions, required by JPQL + */ +jpaNonstandardFunction + : FUNCTION LEFT_PAREN jpaNonstandardFunctionName (COMMA genericFunctionArguments)? RIGHT_PAREN + ; + +/** + * The name of a user-defined or native database function, given as a quoted string + */ +jpaNonstandardFunctionName + : STRING_LITERAL + ; + +/** + * Any function invocation that follows the regular syntax + * + * The function name, followed by a parenthesized list of comma-separated expressions + */ +genericFunction + : genericFunctionName LEFT_PAREN (genericFunctionArguments | ASTERISK)? RIGHT_PAREN + nthSideClause? nullsClause? withinGroupClause? filterClause? overClause? 
+ ; + +/** + * The name of a generic function, which may contain periods and quoted identifiers + * + * Names of generic functions are resolved against the SqmFunctionRegistry + */ +genericFunctionName + : simplePath + ; + +/** + * The arguments of a generic function + */ +genericFunctionArguments + : (DISTINCT | datetimeField COMMA)? expressionOrPredicate (COMMA expressionOrPredicate)* + ; + +/** + * The special 'size()' function defined by JPQL + */ +collectionSizeFunction + : SIZE LEFT_PAREN path RIGHT_PAREN + ; + +/** + * Special rule for 'max(elements())`, 'avg(keys())', 'sum(indices())`, etc., as defined by HQL + * Also the deprecated 'maxindex()', 'maxelement()', 'minindex()', 'minelement()' functions from old HQL + */ +collectionAggregateFunction + : (MAX|MIN|SUM|AVG) LEFT_PAREN elementsValuesQuantifier LEFT_PAREN path RIGHT_PAREN RIGHT_PAREN # ElementAggregateFunction + | (MAX|MIN|SUM|AVG) LEFT_PAREN indicesKeysQuantifier LEFT_PAREN path RIGHT_PAREN RIGHT_PAREN # IndexAggregateFunction + | (MAXELEMENT|MINELEMENT) LEFT_PAREN path RIGHT_PAREN # ElementAggregateFunction + | (MAXINDEX|MININDEX) LEFT_PAREN path RIGHT_PAREN # IndexAggregateFunction + ; + +/** + * To accommodate the misuse of elements() and indices() in the select clause + * + * (At some stage in the history of HQL, someone mixed them up with value() and index(), + * and so we have tests that insist they're interchangeable. Ugh.) + */ +collectionFunctionMisuse + : elementsValuesQuantifier LEFT_PAREN path RIGHT_PAREN + | indicesKeysQuantifier LEFT_PAREN path RIGHT_PAREN + ; + +/** + * The special 'every()', 'all()', 'any()' and 'some()' functions defined by HQL + * + * May be applied to a subquery or collection reference, or may occur as an aggregate function in the 'select' clause + */ aggregateFunction - : avgFunction - | sumFunction - | minFunction - | maxFunction - | countFunction + : everyFunction + | anyFunction + | listaggFunction ; -avgFunction - : AVG LEFT_PAREN DISTINCT? 
expression RIGHT_PAREN +/** + * The functions 'every()' and 'all()' are synonyms + */ +everyFunction + : everyAllQuantifier LEFT_PAREN predicate RIGHT_PAREN filterClause? overClause? + | everyAllQuantifier LEFT_PAREN subquery RIGHT_PAREN + | everyAllQuantifier collectionQuantifier LEFT_PAREN simplePath RIGHT_PAREN ; -sumFunction - : SUM LEFT_PAREN DISTINCT? expression RIGHT_PAREN +/** + * The functions 'any()' and 'some()' are synonyms + */ +anyFunction + : anySomeQuantifier LEFT_PAREN predicate RIGHT_PAREN filterClause? overClause? + | anySomeQuantifier LEFT_PAREN subquery RIGHT_PAREN + | anySomeQuantifier collectionQuantifier LEFT_PAREN simplePath RIGHT_PAREN ; -minFunction - : MIN LEFT_PAREN DISTINCT? expression RIGHT_PAREN +everyAllQuantifier + : EVERY + | ALL + ; + +anySomeQuantifier + : ANY + | SOME + ; + +/** + * The 'listagg()' ordered set-aggregate function + */ +listaggFunction + : LISTAGG LEFT_PAREN DISTINCT? expressionOrPredicate COMMA expressionOrPredicate onOverflowClause? RIGHT_PAREN + withinGroupClause? filterClause? overClause? ; -maxFunction - : MAX LEFT_PAREN DISTINCT? expression RIGHT_PAREN +/** + * A 'on overflow' clause: what to do when the text data type used for 'listagg' overflows + */ +onOverflowClause + : ON OVERFLOW (ERROR | TRUNCATE expression? (WITH|WITHOUT) COUNT) ; -countFunction - : COUNT LEFT_PAREN DISTINCT? 
(expression | ASTERISK) RIGHT_PAREN +/** + * A 'within group' clause: defines the order in which the ordered set-aggregate function should work + */ +withinGroupClause + : WITHIN GROUP LEFT_PAREN orderByClause RIGHT_PAREN ; -standardFunction - : castFunction - | concatFunction - | substringFunction - | trimFunction - | upperFunction - | lowerFunction - | lengthFunction - | locateFunction - | absFunction - | sqrtFunction - | modFunction - | strFunction - | currentDateFunction - | currentTimeFunction - | currentTimestampFunction - | extractFunction - | positionFunction - | charLengthFunction - | octetLengthFunction - | bitLengthFunction +/** + * A 'filter' clause: a restriction applied to an aggregate function + */ +filterClause + : FILTER LEFT_PAREN whereClause RIGHT_PAREN + ; + +/** + * A `nulls` clause: what should a value access window function do when encountering a `null` + */ +nullsClause + : RESPECT NULLS + | IGNORE NULLS ; +/** + * A `nulls` clause: what should a value access window function do when encountering a `null` + */ +nthSideClause + : FROM FIRST + | FROM LAST + ; +/** + * A 'over' clause: the specification of a window within which the function should act + */ +overClause + : OVER LEFT_PAREN partitionClause? orderByClause? frameClause? RIGHT_PAREN + ; + +/** + * A 'partition' clause: the specification the group within which a function should act in a window + */ +partitionClause + : PARTITION BY expression (COMMA expression)* + ; + +/** + * A 'frame' clause: the specification the content of the window + */ +frameClause + : (RANGE|ROWS|GROUPS) frameStart frameExclusion? + | (RANGE|ROWS|GROUPS) BETWEEN frameStart AND frameEnd frameExclusion? 
+ ; + +/** + * The start of the window content + */ +frameStart + : CURRENT ROW + | UNBOUNDED PRECEDING + | expression PRECEDING + | expression FOLLOWING + ; + +/** + * The end of the window content + */ +frameEnd + : CURRENT ROW + | UNBOUNDED FOLLOWING + | expression PRECEDING + | expression FOLLOWING + ; + +/** + * A 'exclusion' clause: the specification what to exclude from the window content + */ +frameExclusion + : EXCLUDE CURRENT ROW + | EXCLUDE GROUP + | EXCLUDE TIES + | EXCLUDE NO OTHERS + ; + +/** + * Any function with an irregular syntax for the argument list + * + * These are all inspired by the syntax of ANSI SQL + */ +standardFunction + : castFunction + | extractFunction + | truncFunction + | formatFunction + | collateFunction + | substringFunction + | overlayFunction + | trimFunction + | padFunction + | positionFunction + | currentDateFunction + | currentTimeFunction + | currentTimestampFunction + | instantFunction + | localDateFunction + | localTimeFunction + | localDateTimeFunction + | offsetDateTimeFunction + | cube + | rollup + ; + +/** + * The 'cast()' function for typecasting + */ castFunction : CAST LEFT_PAREN expression AS castTarget RIGHT_PAREN ; +/** + * The target type for a typecast: a typename, together with length or precision/scale + */ castTarget - // todo (6.0) : should allow either - // - named cast (IDENTIFIER) - // - JavaTypeDescriptorRegistry (imported) key - // - java.sql.Types field NAME (alias for its value as a coded cast) - // - "pass through" - // - coded cast (INTEGER_LITERAL) - // - SqlTypeDescriptorRegistry key - : IDENTIFIER + : castTargetType (LEFT_PAREN INTEGER_LITERAL (COMMA INTEGER_LITERAL)? RIGHT_PAREN)? 
; -concatFunction - : CONCAT LEFT_PAREN expression (COMMA expression)+ RIGHT_PAREN +/** + * The name of the target type in a typecast + * + * Like the 'entityName' rule, we have a specialized dotIdentifierSequence rule + */ +castTargetType + returns [String fullTargetName] + : (i=identifier { $fullTargetName = _localctx.i.getText(); }) (DOT c=identifier { $fullTargetName += ("." + _localctx.c.getText() ); })* ; +/** + * The two formats for the 'substring() function: one defined by JPQL, the other by ANSI SQL + */ substringFunction - : (SUBSTRING | SUBSTR) LEFT_PAREN expression COMMA substringFunctionStartArgument (COMMA substringFunctionLengthArgument)? RIGHT_PAREN + : SUBSTRING LEFT_PAREN expression COMMA substringFunctionStartArgument (COMMA substringFunctionLengthArgument)? RIGHT_PAREN + | SUBSTRING LEFT_PAREN expression FROM substringFunctionStartArgument (FOR substringFunctionLengthArgument)? RIGHT_PAREN ; substringFunctionStartArgument @@ -607,6 +1341,9 @@ substringFunctionLengthArgument : expression ; +/** + * The ANSI SQL-style 'trim()' function + */ trimFunction : TRIM LEFT_PAREN trimSpecification? trimCharacter? FROM? expression RIGHT_PAREN ; @@ -618,205 +1355,414 @@ trimSpecification ; trimCharacter - : CHARACTER_LITERAL | STRING_LITERAL - ; - -upperFunction - : UPPER LEFT_PAREN expression RIGHT_PAREN + : STRING_LITERAL ; -lowerFunction - : LOWER LEFT_PAREN expression RIGHT_PAREN +/** + * A 'pad()' function inspired by 'trim()' + */ +padFunction + : PAD LEFT_PAREN expression WITH padLength padSpecification padCharacter? RIGHT_PAREN ; -lengthFunction - : LENGTH LEFT_PAREN expression RIGHT_PAREN +padSpecification + : LEADING + | TRAILING ; -locateFunction - : LOCATE LEFT_PAREN locateFunctionSubstrArgument COMMA locateFunctionStringArgument (COMMA locateFunctionStartArgument)? 
RIGHT_PAREN +padCharacter + : STRING_LITERAL ; -locateFunctionSubstrArgument +padLength : expression ; -locateFunctionStringArgument - : expression +/** + * The ANSI SQL-style 'overlay()' function + */ +overlayFunction + : OVERLAY LEFT_PAREN overlayFunctionStringArgument PLACING overlayFunctionReplacementArgument FROM overlayFunctionStartArgument (FOR overlayFunctionLengthArgument)? RIGHT_PAREN ; -locateFunctionStartArgument +overlayFunctionStringArgument : expression ; -absFunction - : ABS LEFT_PAREN expression RIGHT_PAREN - ; - -sqrtFunction - : SQRT LEFT_PAREN expression RIGHT_PAREN - ; - -modFunction - : MOD LEFT_PAREN modDividendArgument COMMA modDivisorArgument RIGHT_PAREN +overlayFunctionReplacementArgument + : expression ; -strFunction - : STR LEFT_PAREN expression RIGHT_PAREN - ; - -modDividendArgument +overlayFunctionStartArgument : expression ; -modDivisorArgument +overlayFunctionLengthArgument : expression ; +/** + * The deprecated current_date function required by JPQL + */ currentDateFunction : CURRENT_DATE (LEFT_PAREN RIGHT_PAREN)? + | CURRENT DATE ; +/** + * The deprecated current_time function required by JPQL + */ currentTimeFunction : CURRENT_TIME (LEFT_PAREN RIGHT_PAREN)? + | CURRENT TIME ; +/** + * The deprecated current_timestamp function required by JPQL + */ currentTimestampFunction : CURRENT_TIMESTAMP (LEFT_PAREN RIGHT_PAREN)? + | CURRENT TIMESTAMP + ; + +/** + * The instant function, and deprecated current_instant function + */ +instantFunction + : CURRENT_INSTANT (LEFT_PAREN RIGHT_PAREN)? //deprecated legacy syntax + | INSTANT + ; + +/** + * The 'local datetime' function (or literal if you prefer) + */ +localDateTimeFunction + : LOCAL_DATETIME (LEFT_PAREN RIGHT_PAREN)? + | LOCAL DATETIME + ; + +/** + * The 'offset datetime' function (or literal if you prefer) + */ +offsetDateTimeFunction + : OFFSET_DATETIME (LEFT_PAREN RIGHT_PAREN)? 
+ | OFFSET DATETIME + ; + +/** + * The 'local date' function (or literal if you prefer) + */ +localDateFunction + : LOCAL_DATE (LEFT_PAREN RIGHT_PAREN)? + | LOCAL DATE ; +/** + * The 'local time' function (or literal if you prefer) + */ +localTimeFunction + : LOCAL_TIME (LEFT_PAREN RIGHT_PAREN)? + | LOCAL TIME + ; + +/** + * The 'format()' function for formatting dates and times according to a pattern + */ +formatFunction + : FORMAT LEFT_PAREN expression AS format RIGHT_PAREN + ; + +/** + * A format pattern, with a syntax inspired by by java.time.format.DateTimeFormatter + * + * see 'Dialect.appendDatetimeFormat()' + */ +format + : STRING_LITERAL + ; + +/** + * The 'extract()' function for extracting fields of dates, times, and datetimes + */ extractFunction : EXTRACT LEFT_PAREN extractField FROM expression RIGHT_PAREN + | datetimeField LEFT_PAREN expression RIGHT_PAREN + ; + +/** + * The 'trunc()' function for truncating both numeric and datetime values + */ +truncFunction + : (TRUNC | TRUNCATE) LEFT_PAREN expression (COMMA (datetimeField | expression))? RIGHT_PAREN ; +/** + * A field that may be extracted from a date, time, or datetime + */ extractField : datetimeField + | dayField + | weekField | timeZoneField + | dateOrTimeField ; datetimeField - : nonSecondDatetimeField - | SECOND - ; - -nonSecondDatetimeField : YEAR | MONTH | DAY + | WEEK + | QUARTER | HOUR | MINUTE + | SECOND + | NANOSECOND + | EPOCH + ; + +dayField + : DAY OF MONTH + | DAY OF WEEK + | DAY OF YEAR + ; + +weekField + : WEEK OF MONTH + | WEEK OF YEAR ; timeZoneField - : TIMEZONE_HOUR - | TIMEZONE_MINUTE + : OFFSET (HOUR | MINUTE)? 
+ | TIMEZONE_HOUR | TIMEZONE_MINUTE ; -positionFunction - : POSITION LEFT_PAREN positionSubstrArgument IN positionStringArgument RIGHT_PAREN +dateOrTimeField + : DATE + | TIME ; -positionSubstrArgument - : expression +/** + * The ANSI SQL-style 'position()' function + */ +positionFunction + : POSITION LEFT_PAREN positionFunctionPatternArgument IN positionFunctionStringArgument RIGHT_PAREN ; -positionStringArgument +positionFunctionPatternArgument : expression ; -charLengthFunction - : CAST LEFT_PAREN expression RIGHT_PAREN +positionFunctionStringArgument + : expression ; -octetLengthFunction - : OCTET_LENGTH LEFT_PAREN expression RIGHT_PAREN +/** + * The 'cube()' function specific to the 'group by' clause + */ +cube + : CUBE LEFT_PAREN expressionOrPredicate (COMMA expressionOrPredicate)* RIGHT_PAREN ; -bitLengthFunction - : BIT_LENGTH LEFT_PAREN expression RIGHT_PAREN +/** + * The 'rollup()' function specific to the 'group by' clause + */ +rollup + : ROLLUP LEFT_PAREN expressionOrPredicate (COMMA expressionOrPredicate)* RIGHT_PAREN ; /** - * The `identifier` is used to provide "keyword as identifier" handling. + * Support for "soft" keywords which may be used as identifiers + * + * The 'identifier' rule is used to provide "keyword as identifier" handling. * * The lexer hands us recognized keywords using their specific tokens. This is important * for the recognition of sqm structure, especially in terms of performance! * - * However we want to continue to allow users to use mopst keywords as identifiers (e.g., attribute names). + * However we want to continue to allow users to use most keywords as identifiers (e.g., attribute names). * This parser rule helps with that. Here we expect that the caller already understands their * context enough to know that keywords-as-identifiers are allowed. 
*/ -identifier + // All except the possible optional following keywords LEFT, RIGHT, INNER, FULL, OUTER + nakedIdentifier : IDENTIFIER - | (ABS - | ALL + | QUOTED_IDENTIFIER + | (ALL | AND | ANY | AS | ASC | AVG - | BY | BETWEEN - | BIT_LENGTH | BOTH + | BREADTH + | BY + | CASE | CAST - | COALESCE | COLLATE - | CONCAT | COUNT | CROSS + | CUBE + | CURRENT + | CURRENT_DATE + | CURRENT_INSTANT + | CURRENT_TIME + | CURRENT_TIMESTAMP + | CYCLE + | DATE + | DATETIME | DAY + | DEFAULT | DELETE + | DEPTH | DESC | DISTINCT + | ELEMENT | ELEMENTS + | ELSE + | EMPTY + | END | ENTRY + | EPOCH + | ERROR + | ESCAPE + | EVERY + | EXCEPT + | EXCLUDE + | EXISTS + | EXTRACT + | FETCH + | FILTER + | FIRST + | FOLLOWING + | FOR + | FORMAT | FROM - | FULL +// | FULL | FUNCTION | GROUP + | GROUPS + | HAVING | HOUR + | ID + | IGNORE + | ILIKE | IN | INDEX - | INNER + | INDICES +// | INNER | INSERT + | INSTANT + | INTERSECT + | INTO + | IS | JOIN | KEY + | KEYS + | LAST + | LATERAL | LEADING - | LEFT - | LENGTH +// | LEFT | LIKE + | LIMIT | LIST - | LOWER + | LISTAGG + | LOCAL + | LOCAL_DATE + | LOCAL_DATETIME + | LOCAL_TIME | MAP + | MATERIALIZED | MAX + | MAXELEMENT + | MAXINDEX + | MEMBER + | MICROSECOND + | MILLISECOND | MIN + | MINELEMENT + | MININDEX | MINUTE - | MEMBER | MONTH + | NANOSECOND + | NATURALID + | NEW + | NEXT + | NO + | NOT + | NULLS | OBJECT + | OF + | OFFSET + | OFFSET_DATETIME | ON + | ONLY | OR | ORDER - | OUTER + | OTHERS +// | OUTER + | OVER + | OVERFLOW + | OVERLAY + | PAD + | PARTITION + | PERCENT + | PLACING | POSITION - | RIGHT - | SELECT + | PRECEDING + | QUARTER + | RANGE + | RESPECT +// | RIGHT + | ROLLUP + | ROW + | ROWS + | SEARCH | SECOND + | SELECT | SET - | SQRT - | STR + | SIZE + | SOME | SUBSTRING | SUM + | THEN + | TIES + | TIME + | TIMESTAMP + | TIMEZONE_HOUR + | TIMEZONE_MINUTE + | TO | TRAILING | TREAT + | TRIM + | TRUNC + | TRUNCATE + | TYPE + | UNBOUNDED + | UNION | UPDATE - | UPPER + | USING | VALUE + | VALUES + | VERSION + | VERSIONED + | 
WEEK + | WHEN | WHERE | WITH - | YEAR) { - logUseOfReservedWordAsIdentifier(getCurrentToken()); + | WITHIN + | WITHOUT + | YEAR + | ZONED) { + logUseOfReservedWordAsIdentifier( getCurrentToken() ); + } + ; +identifier + : nakedIdentifier + | (FULL + | INNER + | LEFT + | OUTER + | RIGHT) { + logUseOfReservedWordAsIdentifier( getCurrentToken() ); } ; - diff --git a/extensions/panache/panacheql/src/test/java/io/quarkus/panacheql/LexerTest.java b/extensions/panache/panacheql/src/test/java/io/quarkus/panacheql/LexerTest.java index 495629e822eac2..258cb1d9718295 100644 --- a/extensions/panache/panacheql/src/test/java/io/quarkus/panacheql/LexerTest.java +++ b/extensions/panache/panacheql/src/test/java/io/quarkus/panacheql/LexerTest.java @@ -8,10 +8,10 @@ import io.quarkus.panacheql.internal.HqlLexer; import io.quarkus.panacheql.internal.HqlParser; import io.quarkus.panacheql.internal.HqlParser.AndPredicateContext; -import io.quarkus.panacheql.internal.HqlParser.EqualityPredicateContext; +import io.quarkus.panacheql.internal.HqlParser.ComparisonPredicateContext; +import io.quarkus.panacheql.internal.HqlParser.GeneralPathExpressionContext; import io.quarkus.panacheql.internal.HqlParser.IsNullPredicateContext; import io.quarkus.panacheql.internal.HqlParser.LiteralExpressionContext; -import io.quarkus.panacheql.internal.HqlParser.PathExpressionContext; import io.quarkus.panacheql.internal.HqlParser.PredicateContext; import io.quarkus.panacheql.internal.HqlParserBaseVisitor; @@ -47,8 +47,11 @@ public String visitIsNullPredicate(IsNullPredicateContext ctx) { } @Override - public String visitEqualityPredicate(EqualityPredicateContext ctx) { - return ctx.expression(0).accept(this) + " == " + ctx.expression(1).accept(this); + public String visitComparisonPredicate(ComparisonPredicateContext ctx) { + if (ctx.comparisonOperator().EQUAL() != null) { + return ctx.expression(0).accept(this) + " == " + ctx.expression(1).accept(this); + } + return super.visitComparisonPredicate(ctx); } 
@Override @@ -57,7 +60,7 @@ public String visitLiteralExpression(LiteralExpressionContext ctx) { } @Override - public String visitPathExpression(PathExpressionContext ctx) { + public String visitGeneralPathExpression(GeneralPathExpressionContext ctx) { return ctx.getText(); } }; diff --git a/extensions/pom.xml b/extensions/pom.xml index f405528ad17822..5dd8c802a2f680 100644 --- a/extensions/pom.xml +++ b/extensions/pom.xml @@ -38,6 +38,7 @@ vertx-http undertow websockets + websockets-next webjars-locator resteasy-reactive reactive-routes @@ -223,49 +224,18 @@ maven-enforcer-plugin - enforce-no-runtime-deps + enforce enforce - + - - - io.quarkus - quarkus-enforcer-rules - ${project.version} - - - - - - com.gradle - gradle-enterprise-maven-extension - - - - - - maven-compiler-plugin - - the extension config doc generation tool shares data across all extensions - - - - - - - - diff --git a/extensions/quartz/deployment/src/test/java/io/quarkus/quartz/test/DependentBeanJobTest.java b/extensions/quartz/deployment/src/test/java/io/quarkus/quartz/test/DependentBeanJobTest.java new file mode 100644 index 00000000000000..9a2943c1ab78ac --- /dev/null +++ b/extensions/quartz/deployment/src/test/java/io/quarkus/quartz/test/DependentBeanJobTest.java @@ -0,0 +1,216 @@ +package io.quarkus.quartz.test; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import jakarta.annotation.PostConstruct; +import jakarta.annotation.PreDestroy; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.enterprise.context.Dependent; +import jakarta.inject.Inject; + +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.quartz.Job; +import org.quartz.JobBuilder; +import org.quartz.JobDetail; +import org.quartz.JobExecutionContext; +import org.quartz.JobExecutionException; +import 
org.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.quartz.SimpleScheduleBuilder; +import org.quartz.Trigger; +import org.quartz.TriggerBuilder; + +import io.quarkus.test.QuarkusUnitTest; + +public class DependentBeanJobTest { + + @RegisterExtension + static final QuarkusUnitTest test = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addClasses(Service.class, MyJob.class, RefiringJob.class) + .addAsResource(new StringAsset("quarkus.quartz.start-mode=forced"), + "application.properties")); + + @Inject + Scheduler quartz; + + @Inject + Service service; + + @Test + public void testDependentBeanJobDestroyed() throws SchedulerException, InterruptedException { + // prepare latches, schedule 10 one-off jobs, assert + CountDownLatch execLatch = service.initExecuteLatch(10); + CountDownLatch constructLatch = service.initConstructLatch(10); + CountDownLatch destroyedLatch = service.initDestroyedLatch(10); + for (int i = 0; i < 10; i++) { + Trigger trigger = TriggerBuilder.newTrigger() + .withIdentity("myTrigger" + i, "myGroup") + .startNow() + .build(); + JobDetail job = JobBuilder.newJob(MyJob.class) + .withIdentity("myJob" + i, "myGroup") + .build(); + quartz.scheduleJob(job, trigger); + } + assertTrue(execLatch.await(2, TimeUnit.SECONDS), "Latch count: " + execLatch.getCount()); + assertTrue(constructLatch.await(2, TimeUnit.SECONDS), "Latch count: " + constructLatch.getCount()); + assertTrue(destroyedLatch.await(2, TimeUnit.SECONDS), "Latch count: " + destroyedLatch.getCount()); + + // now try the same with repeating job triggering three times + execLatch = service.initExecuteLatch(3); + constructLatch = service.initConstructLatch(3); + destroyedLatch = service.initDestroyedLatch(3); + JobDetail job = JobBuilder.newJob(MyJob.class) + .withIdentity("myRepeatingJob", "myGroup") + .build(); + Trigger trigger = TriggerBuilder.newTrigger() + .withIdentity("myRepeatingTrigger", "myGroup") + .startNow() + .withSchedule( + 
SimpleScheduleBuilder.simpleSchedule() + .withIntervalInMilliseconds(333) + .withRepeatCount(2)) + .build(); + quartz.scheduleJob(job, trigger); + + assertTrue(execLatch.await(2, TimeUnit.SECONDS), "Latch count: " + execLatch.getCount()); + assertTrue(constructLatch.await(2, TimeUnit.SECONDS), "Latch count: " + constructLatch.getCount()); + assertTrue(destroyedLatch.await(2, TimeUnit.SECONDS), "Latch count: " + destroyedLatch.getCount()); + } + + @Test + public void testDependentBeanJobWithRefire() throws SchedulerException, InterruptedException { + // 5 one-off jobs should trigger construction/execution/destruction 10 times in total + CountDownLatch execLatch = service.initExecuteLatch(10); + CountDownLatch constructLatch = service.initConstructLatch(10); + CountDownLatch destroyedLatch = service.initDestroyedLatch(10); + for (int i = 0; i < 5; i++) { + Trigger trigger = TriggerBuilder.newTrigger() + .withIdentity("myTrigger" + i, "myRefiringGroup") + .startNow() + .build(); + JobDetail job = JobBuilder.newJob(RefiringJob.class) + .withIdentity("myJob" + i, "myRefiringGroup") + .build(); + quartz.scheduleJob(job, trigger); + } + assertTrue(execLatch.await(2, TimeUnit.SECONDS), "Latch count: " + execLatch.getCount()); + assertTrue(constructLatch.await(2, TimeUnit.SECONDS), "Latch count: " + constructLatch.getCount()); + assertTrue(destroyedLatch.await(2, TimeUnit.SECONDS), "Latch count: " + destroyedLatch.getCount()); + + // repeating job triggering three times; we expect six beans to exist for that due to refires + execLatch = service.initExecuteLatch(6); + constructLatch = service.initConstructLatch(6); + destroyedLatch = service.initDestroyedLatch(6); + JobDetail job = JobBuilder.newJob(RefiringJob.class) + .withIdentity("myRepeatingJob", "myRefiringGroup") + .build(); + Trigger trigger = TriggerBuilder.newTrigger() + .withIdentity("myRepeatingTrigger", "myRefiringGroup") + .startNow() + .withSchedule( + SimpleScheduleBuilder.simpleSchedule() + 
.withIntervalInMilliseconds(333) + .withRepeatCount(2)) + .build(); + quartz.scheduleJob(job, trigger); + + assertTrue(execLatch.await(2, TimeUnit.SECONDS), "Latch count: " + execLatch.getCount()); + assertTrue(constructLatch.await(2, TimeUnit.SECONDS), "Latch count: " + constructLatch.getCount()); + assertTrue(destroyedLatch.await(2, TimeUnit.SECONDS), "Latch count: " + destroyedLatch.getCount()); + } + + @ApplicationScoped + public static class Service { + + volatile CountDownLatch executeLatch; + volatile CountDownLatch constructedLatch; + volatile CountDownLatch destroyedLatch; + + public CountDownLatch initExecuteLatch(int latchCountdown) { + this.executeLatch = new CountDownLatch(latchCountdown); + return executeLatch; + } + + public CountDownLatch initConstructLatch(int latchCountdown) { + this.constructedLatch = new CountDownLatch(latchCountdown); + return constructedLatch; + } + + public CountDownLatch initDestroyedLatch(int latchCountdown) { + this.destroyedLatch = new CountDownLatch(latchCountdown); + return destroyedLatch; + } + + public void execute() { + executeLatch.countDown(); + } + + public void constructedLatch() { + constructedLatch.countDown(); + } + + public void destroyedLatch() { + destroyedLatch.countDown(); + } + + } + + @Dependent + static class MyJob implements Job { + + @Inject + Service service; + + @PostConstruct + void postConstruct() { + service.constructedLatch(); + } + + @PreDestroy + void preDestroy() { + service.destroyedLatch(); + } + + @Override + public void execute(JobExecutionContext context) throws JobExecutionException { + service.execute(); + } + } + + @Dependent + static class RefiringJob implements Job { + + @Inject + Service service; + + @PostConstruct + void postConstruct() { + service.constructedLatch(); + } + + @PreDestroy + void preDestroy() { + service.destroyedLatch(); + } + + @Override + public void execute(JobExecutionContext context) throws JobExecutionException { + if (context.getRefireCount() == 0) { + 
service.execute(); + // request re-fire; we expect a new dependent bean to be used for that + throw new JobExecutionException("Refiring job", true); + } else { + service.execute(); + // no re-fire the second time + throw new JobExecutionException("Job was re-fired successfully", false); + } + } + } +} diff --git a/extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/CdiAwareJob.java b/extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/CdiAwareJob.java new file mode 100644 index 00000000000000..23f40652349064 --- /dev/null +++ b/extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/CdiAwareJob.java @@ -0,0 +1,37 @@ +package io.quarkus.quartz.runtime; + +import jakarta.enterprise.context.Dependent; +import jakarta.enterprise.inject.Instance; + +import org.quartz.Job; +import org.quartz.JobExecutionContext; +import org.quartz.JobExecutionException; +import org.quartz.Scheduler; +import org.quartz.spi.TriggerFiredBundle; + +/** + * An abstraction allowing proper destruction of Job instances in case they are dependent beans. + * According to {@link org.quartz.spi.JobFactory#newJob(TriggerFiredBundle, Scheduler)}, a new job instance is created for every + * trigger. + * We will therefore create a new dependent bean for every trigger and destroy it afterwards. 
+ */ +class CdiAwareJob implements Job { + + private final Instance jobInstance; + + public CdiAwareJob(Instance jobInstance) { + this.jobInstance = jobInstance; + } + + @Override + public void execute(JobExecutionContext context) throws JobExecutionException { + Instance.Handle handle = jobInstance.getHandle(); + try { + handle.get().execute(context); + } finally { + if (handle.getBean().getScope().equals(Dependent.class)) { + handle.destroy(); + } + } + } +} diff --git a/extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/QuartzSchedulerImpl.java b/extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/QuartzSchedulerImpl.java index 60e76ea042e8e7..7ea756c6a5fb46 100644 --- a/extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/QuartzSchedulerImpl.java +++ b/extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/QuartzSchedulerImpl.java @@ -1243,10 +1243,10 @@ public Job newJob(TriggerFiredBundle bundle, org.quartz.Scheduler Scheduler) thr // Get the original class from an intercepted bean class jobClass = (Class) jobClass.getSuperclass(); } - Instance instance = jobs.select(jobClass); + Instance instance = jobs.select(jobClass); if (instance.isResolvable()) { // This is a job backed by a CDI bean - return jobWithSpanWrapper((Job) instance.get()); + return jobWithSpanWrapper(new CdiAwareJob(instance)); } // Instantiate a plain job class return jobWithSpanWrapper(super.newJob(bundle, Scheduler)); diff --git a/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteProcessor.java b/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteProcessor.java index c2fc7b73931673..e2ab7c880eaed0 100644 --- a/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteProcessor.java +++ b/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteProcessor.java @@ -41,6 +41,8 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import 
jakarta.inject.Singleton; + import org.jboss.jandex.AnnotationInstance; import org.jboss.jandex.AnnotationTarget; import org.jboss.jandex.AnnotationTarget.Kind; @@ -77,6 +79,7 @@ import io.quarkus.deployment.ApplicationArchive; import io.quarkus.deployment.Feature; import io.quarkus.deployment.GeneratedClassGizmoAdaptor; +import io.quarkus.deployment.IsTest; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.annotations.Record; @@ -109,6 +112,7 @@ import io.quarkus.qute.ParameterDeclaration; import io.quarkus.qute.ParserHelper; import io.quarkus.qute.ParserHook; +import io.quarkus.qute.RenderedResults; import io.quarkus.qute.ResultNode; import io.quarkus.qute.SectionHelper; import io.quarkus.qute.SectionHelperFactory; @@ -148,6 +152,7 @@ import io.quarkus.qute.runtime.extensions.OrOperatorTemplateExtensions; import io.quarkus.qute.runtime.extensions.StringTemplateExtensions; import io.quarkus.qute.runtime.extensions.TimeTemplateExtensions; +import io.quarkus.qute.runtime.test.RenderedResultsCreator; import io.quarkus.runtime.util.StringUtil; public class QuteProcessor { @@ -874,6 +879,18 @@ void validateCheckedFragments(List validatio } } + @BuildStep(onlyIf = IsTest.class) + SyntheticBeanBuildItem registerRenderedResults(QuteConfig config) { + if (config.testMode.recordRenderedResults) { + return SyntheticBeanBuildItem.configure(RenderedResults.class) + .unremovable() + .scope(Singleton.class) + .creator(RenderedResultsCreator.class) + .done(); + } + return null; + } + @SuppressWarnings("incomplete-switch") private static String getCheckedTemplateParameterTypeName(Type type) { switch (type.kind()) { diff --git a/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/test/FooTemplates.java b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/test/FooTemplates.java new file mode 100644 index 00000000000000..3aa4576ca31684 --- /dev/null +++ 
b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/test/FooTemplates.java @@ -0,0 +1,12 @@ +package io.quarkus.qute.deployment.test; + +import io.quarkus.qute.CheckedTemplate; +import io.quarkus.qute.TemplateInstance; + +@CheckedTemplate +public class FooTemplates { + + static native TemplateInstance foo(String name); + + static native TemplateInstance foo$bar(); +} diff --git a/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/test/RenderedResultsDisabledTest.java b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/test/RenderedResultsDisabledTest.java new file mode 100644 index 00000000000000..867778d4ee832c --- /dev/null +++ b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/test/RenderedResultsDisabledTest.java @@ -0,0 +1,33 @@ +package io.quarkus.qute.deployment.test; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +import jakarta.enterprise.inject.Instance; +import jakarta.inject.Inject; + +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.qute.RenderedResults; +import io.quarkus.test.QuarkusUnitTest; + +public class RenderedResultsDisabledTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .withApplicationRoot(root -> root + .addClasses(SimpleBean.class) + .addAsResource(new StringAsset("quarkus.qute.test-mode.record-rendered-results=false"), + "application.properties") + .addAsResource(new StringAsset("{name}"), "templates/foo.txt")); + + @Inject + Instance renderedResults; + + @Test + public void testRenderedResultsNotRegistered() { + assertTrue(renderedResults.isUnsatisfied()); + } + +} diff --git a/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/test/RenderedResultsTest.java b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/test/RenderedResultsTest.java new file mode 
100644 index 00000000000000..3c15c854ebc249 --- /dev/null +++ b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/test/RenderedResultsTest.java @@ -0,0 +1,98 @@ +package io.quarkus.qute.deployment.test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; + +import jakarta.inject.Inject; + +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.qute.RenderedResults; +import io.quarkus.qute.RenderedResults.RenderedResult; +import io.quarkus.qute.TemplateInstance; +import io.quarkus.qute.Variant; +import io.quarkus.test.QuarkusUnitTest; + +public class RenderedResultsTest { + + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .withApplicationRoot(root -> root + .addClasses(SimpleBean.class, FooTemplates.class) + .addAsResource(new StringAsset("quarkus.qute.suffixes=txt,html"), "application.properties") + .addAsResource(new StringAsset("{name}{#fragment id=bar rendered=false}bar{/fragment}"), + "templates/foo.txt") + .addAsResource(new StringAsset("

{name}{#fragment id=bar rendered=false}bar{/fragment}

"), + "templates/foo.html")); + + @Inject + RenderedResults renderedResults; + + @Inject + SimpleBean bean; + + @Test + public void testInjectedTemplate() throws InterruptedException { + assertResults(() -> bean.fooInstance().data("name", "oof").render(), "foo.txt", "oof"); + } + + @Test + public void testInjectedTemplateSelectedVariant() throws InterruptedException { + assertResults(() -> bean.fooInstance() + .setAttribute(TemplateInstance.SELECTED_VARIANT, Variant.forContentType(Variant.TEXT_HTML)) + .data("name", "oof") + .render(), "foo.html", "

oof

"); + } + + @Test + public void testTypesafeTemplate() throws InterruptedException { + assertResults(() -> FooTemplates.foo("oof").render(), "foo.txt", "oof"); + } + + @Test + public void testTypesafeFragment() throws InterruptedException { + assertResults(() -> FooTemplates.foo$bar().render(), "foo.txt$bar", "bar"); + } + + @Test + public void testTypesafeTemplateSelectedVariant() throws InterruptedException { + assertResults( + () -> FooTemplates.foo("oof") + .setAttribute(TemplateInstance.SELECTED_VARIANT, Variant.forContentType(Variant.TEXT_HTML)).render(), + "foo.html", "

oof

"); + } + + @Test + public void testTypesafeFragmentSelectedVariant() throws InterruptedException { + assertResults( + () -> FooTemplates.foo$bar() + .setAttribute(TemplateInstance.SELECTED_VARIANT, Variant.forContentType(Variant.TEXT_HTML)).render(), + "foo.html$bar", "bar"); + } + + private void assertResults(Supplier renderAction, String templateId, String expectedResult) + throws InterruptedException { + renderedResults.clear(); + assertEquals(expectedResult, renderAction.get()); + // Wait a little so that we can test the RenderedResult#timeout() + // Note that LocalDateTime.now() has precision of the system clock and it seems that windows has millisecond precision + TimeUnit.MILLISECONDS.sleep(50); + List results = renderedResults.getResults(templateId); + assertEquals(1, results.size(), renderedResults.toString()); + assertEquals(expectedResult, results.get(0).result()); + assertEquals(expectedResult, renderAction.get()); + results = renderedResults.getResults(templateId); + assertEquals(2, results.size(), renderedResults.toString()); + assertEquals(expectedResult, results.get(1).result()); + assertTrue(results.get(0).timestamp().isBefore(results.get(1).timestamp())); + renderedResults.clear(); + assertTrue(renderedResults.getResults(templateId).isEmpty()); + } + +} diff --git a/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/test/SimpleBean.java b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/test/SimpleBean.java new file mode 100644 index 00000000000000..04439289c74508 --- /dev/null +++ b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/test/SimpleBean.java @@ -0,0 +1,19 @@ +package io.quarkus.qute.deployment.test; + +import jakarta.inject.Inject; +import jakarta.inject.Singleton; + +import io.quarkus.qute.Template; +import io.quarkus.qute.TemplateInstance; + +@Singleton +public class SimpleBean { + + @Inject + Template foo; + + public TemplateInstance fooInstance() { + return foo.instance(); + } 
+ +} diff --git a/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/QuteConfig.java b/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/QuteConfig.java index 5849bb84a5fdd4..ad59c9a174406a 100644 --- a/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/QuteConfig.java +++ b/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/QuteConfig.java @@ -89,9 +89,15 @@ public class QuteConfig { public Charset defaultCharset; /** - * Dev mode configuration. + * Development mode configuration. */ @ConfigItem public QuteDevModeConfig devMode; + /** + * Test mode configuration. + */ + @ConfigItem + public QuteTestModeConfig testMode; + } diff --git a/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/QuteTestModeConfig.java b/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/QuteTestModeConfig.java new file mode 100644 index 00000000000000..13d4dfa30d2943 --- /dev/null +++ b/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/QuteTestModeConfig.java @@ -0,0 +1,17 @@ +package io.quarkus.qute.runtime; + +import io.quarkus.qute.RenderedResults; +import io.quarkus.runtime.annotations.ConfigGroup; +import io.quarkus.runtime.annotations.ConfigItem; + +@ConfigGroup +public class QuteTestModeConfig { + + /** + * By default, the rendering results of injected and type-safe templates are recorded in the managed + * {@link RenderedResults} which is registered as a CDI bean. 
+ */ + @ConfigItem(defaultValue = "true") + public boolean recordRenderedResults; + +} \ No newline at end of file diff --git a/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/TemplateProducer.java b/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/TemplateProducer.java index 6f15bfa4bca361..1a7c6256a16646 100644 --- a/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/TemplateProducer.java +++ b/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/TemplateProducer.java @@ -18,6 +18,7 @@ import java.util.function.Predicate; import java.util.function.Supplier; +import jakarta.enterprise.inject.Instance; import jakarta.enterprise.inject.Produces; import jakarta.enterprise.inject.spi.AnnotatedParameter; import jakarta.enterprise.inject.spi.InjectionPoint; @@ -30,6 +31,8 @@ import io.quarkus.qute.Expression; import io.quarkus.qute.Location; import io.quarkus.qute.ParameterDeclaration; +import io.quarkus.qute.RenderedResults; +import io.quarkus.qute.ResultsCollectingTemplateInstance; import io.quarkus.qute.Template; import io.quarkus.qute.TemplateInstance; import io.quarkus.qute.TemplateInstanceBase; @@ -51,7 +54,10 @@ public class TemplateProducer { // In the dev mode, we need to keep track of injected templates so that we can clear the cached values private final List> injectedTemplates; - TemplateProducer(Engine engine, QuteContext context, ContentTypes contentTypes, LaunchMode launchMode) { + private final RenderedResults renderedResults; + + TemplateProducer(Engine engine, QuteContext context, ContentTypes contentTypes, LaunchMode launchMode, + Instance renderedResults) { this.engine = engine; Map templateVariants = new HashMap<>(); for (Entry> entry : context.getVariants().entrySet()) { @@ -60,6 +66,7 @@ public class TemplateProducer { templateVariants.put(entry.getKey(), var); } this.templateVariants = Collections.unmodifiableMap(templateVariants); + this.renderedResults = launchMode == LaunchMode.TEST ? 
renderedResults.get() : null; this.injectedTemplates = launchMode == LaunchMode.DEVELOPMENT ? Collections.synchronizedList(new ArrayList<>()) : null; LOGGER.debugf("Initializing Qute variant templates: %s", templateVariants); } @@ -122,7 +129,7 @@ public void clearInjectedTemplates() { } private Template newInjectableTemplate(String path) { - InjectableTemplate template = new InjectableTemplate(path, templateVariants, engine); + InjectableTemplate template = new InjectableTemplate(path, templateVariants, engine, renderedResults); if (injectedTemplates != null) { injectedTemplates.add(new WeakReference<>(template)); } @@ -142,8 +149,10 @@ static class InjectableTemplate implements Template { private final Engine engine; // Some methods may only work if a single template variant is found private final LazyValue