diff --git a/bom/application/pom.xml b/bom/application/pom.xml
index 47c9e6f40f370..b94f8b9ae7cde 100644
--- a/bom/application/pom.xml
+++ b/bom/application/pom.xml
@@ -35,7 +35,7 @@
1.30.0-alpha1.8.15.0.3.Final
- 1.11.1
+ 1.11.52.1.120.22.021.1
@@ -84,8 +84,8 @@
2.1.13.1.04.0.1
- 4.0.3
- 9.5
+ 4.0.4
+ 9.62.13.016.0.0.Final3.0-alpha-2
@@ -109,7 +109,7 @@
7.0.0.Final2.18.0.0.Final
- 8.10.2
+ 8.10.42.2.212.2.5.Final2.2.2.Final
@@ -167,12 +167,12 @@
3.2.04.2.03.0.2.Final
- 9.22.2
+ 9.22.33.0.34.24.04.24.0
- 2.1
+ 2.26.0.04.11.01.8.0
@@ -190,7 +190,7 @@
5.8.04.10.12.0.2.Final
- 22.0.4
+ 22.0.51.15.13.38.02.22.0
diff --git a/build-parent/pom.xml b/build-parent/pom.xml
index cf8f26151fa99..bd06d38ef306f 100644
--- a/build-parent/pom.xml
+++ b/build-parent/pom.xml
@@ -101,7 +101,7 @@
- 22.0.4
+ 22.0.519.0.3quay.io/keycloak/keycloak:${keycloak.version}quay.io/keycloak/keycloak:${keycloak.wildfly.version}-legacy
diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildChain.java b/core/builder/src/main/java/io/quarkus/builder/BuildChain.java
index e6c9d381e55f3..4e68cfd2d9e53 100644
--- a/core/builder/src/main/java/io/quarkus/builder/BuildChain.java
+++ b/core/builder/src/main/java/io/quarkus/builder/BuildChain.java
@@ -15,24 +15,17 @@
*/
public final class BuildChain {
private final Set<ItemId> initialIds;
- private final int initialSingleCount;
- private final int initialMultiCount;
private final Set<ItemId> finalIds;
private final List<StepInfo> startSteps;
- private final Set<ItemId> consumed;
private final List providers;
private final int endStepCount;
private final ClassLoader classLoader;
- BuildChain(final int initialSingleCount, final int initialMultiCount, final Set<StepInfo> startSteps,
- final Set<ItemId> consumed, BuildChainBuilder builder, final int endStepCount) {
+ BuildChain(final Set<StepInfo> startSteps, BuildChainBuilder builder, final int endStepCount) {
providers = builder.getProviders();
initialIds = builder.getInitialIds();
finalIds = builder.getFinalIds();
- this.initialSingleCount = initialSingleCount;
- this.initialMultiCount = initialMultiCount;
this.startSteps = new ArrayList<>(startSteps);
- this.consumed = consumed;
this.endStepCount = endStepCount;
this.classLoader = builder.getClassLoader();
}
@@ -96,22 +89,10 @@ boolean hasInitial(final ItemId itemId) {
return initialIds.contains(itemId);
}
- int getInitialSingleCount() {
- return initialSingleCount;
- }
-
- int getInitialMultiCount() {
- return initialMultiCount;
- }
-
List<StepInfo> getStartSteps() {
return startSteps;
}
- Set getConsumed() {
- return consumed;
- }
-
Set<ItemId> getFinalIds() {
return finalIds;
}
diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildChainBuilder.java b/core/builder/src/main/java/io/quarkus/builder/BuildChainBuilder.java
index 0578ba0c4bc34..7d72d220502cc 100644
--- a/core/builder/src/main/java/io/quarkus/builder/BuildChainBuilder.java
+++ b/core/builder/src/main/java/io/quarkus/builder/BuildChainBuilder.java
@@ -151,23 +151,50 @@ public void setClassLoader(ClassLoader classLoader) {
* @throws ChainBuildException if the chain could not be built
*/
public BuildChain build() throws ChainBuildException {
- final Set<ItemId> consumed = new HashSet<>();
- final Map<BuildStepBuilder, StepInfo> mappedSteps = new HashMap<>();
- int initialSingleCount = 0;
- int initialMultiCount = 0;
- final Map<BuildStepBuilder, StackTraceElement[]> steps = this.steps;
- // compile main produce/consume maps
- final Map<ItemId, List<Consume>> allConsumes = new HashMap<>();
+ final Set<BuildStepBuilder> included = new HashSet<>(); // the set of steps already included to avoid duplicates
+ Map<BuildStepBuilder, Set<Produce>> dependencies = wireDependencies(included);
+
+ detectCycles(included, dependencies);
+
+ // recursively build all
+ final Set<StepInfo> startSteps = new HashSet<>();
+ final Set<StepInfo> endSteps = buildAllSteps(included, dependencies, startSteps);
+
+ outputGraph(startSteps, endSteps);
+ return new BuildChain(startSteps, this, endSteps.size());
+ }
+
+ private Map<BuildStepBuilder, Set<Produce>> wireDependencies(Set<BuildStepBuilder> included)
+ throws ChainBuildException {
+ Map<ItemId, List<Produce>> allProduces = extractProducers();
+ final ArrayDeque<BuildStepBuilder> toAdd = new ArrayDeque<>(); // the queue of steps to be added
+ for (ItemId finalId : finalIds) {
+ addItem(allProduces, included, toAdd, finalId);
+ }
+
+ // now recursively add producers of consumed items
+ Map<BuildStepBuilder, Set<Produce>> dependencies = new HashMap<>();
+ BuildStepBuilder stepBuilder;
+ while ((stepBuilder = toAdd.pollFirst()) != null) {
+ for (Map.Entry<ItemId, Consume> entry : stepBuilder.getConsumes().entrySet()) {
+ final Consume consume = entry.getValue();
+ final ItemId id = entry.getKey();
+ if (!consume.getFlags().contains(ConsumeFlag.OPTIONAL) && !id.isMulti()) {
+ if (!initialIds.contains(id) && !allProduces.containsKey(id)) {
+ throw new ChainBuildException("No producers for required item " + id);
+ }
+ }
+ // add every producer
+ addItem(allProduces, included, toAdd, id, dependencies.computeIfAbsent(stepBuilder, x -> new HashSet<>()));
+ }
+ }
+ return dependencies;
+ }
+
+ private Map<ItemId, List<Produce>> extractProducers() throws ChainBuildException {
final Map<ItemId, List<Produce>> allProduces = new HashMap<>();
- final Set<ItemId> initialIds = this.initialIds;
for (Map.Entry<BuildStepBuilder, StackTraceElement[]> stepEntry : steps.entrySet()) {
final BuildStepBuilder stepBuilder = stepEntry.getKey();
- final Map<ItemId, Consume> stepConsumes = stepBuilder.getConsumes();
- for (Map.Entry<ItemId, Consume> entry : stepConsumes.entrySet()) {
- final ItemId id = entry.getKey();
- final List<Consume> list = allConsumes.computeIfAbsent(id, x -> new ArrayList<>(4));
- list.add(entry.getValue());
- }
final Map<ItemId, Produce> stepProduces = stepBuilder.getProduces();
for (Map.Entry<ItemId, Produce> entry : stepProduces.entrySet()) {
final ItemId id = entry.getKey();
@@ -213,87 +240,7 @@ public BuildChain build() throws ChainBuildException {
list.add(toBeAdded);
}
}
- final Set included = new HashSet<>();
- // now begin to wire dependencies
- final Set finalIds = this.finalIds;
- final ArrayDeque toAdd = new ArrayDeque<>();
- final Set lastDependencies = new HashSet<>();
- for (ItemId finalId : finalIds) {
- addOne(allProduces, included, toAdd, finalId, lastDependencies);
- }
- // now recursively add producers of consumed items
- BuildStepBuilder stepBuilder;
- Map> dependencies = new HashMap<>();
- while ((stepBuilder = toAdd.pollFirst()) != null) {
- for (Map.Entry entry : stepBuilder.getConsumes().entrySet()) {
- final Consume consume = entry.getValue();
- final ItemId id = entry.getKey();
- if (!consume.getFlags().contains(ConsumeFlag.OPTIONAL) && !id.isMulti()) {
- if (!initialIds.contains(id) && !allProduces.containsKey(id)) {
- throw new ChainBuildException("No producers for required item " + id);
- }
- }
- // add every producer
- addOne(allProduces, included, toAdd, id, dependencies.computeIfAbsent(stepBuilder, x -> new HashSet<>()));
- }
- }
- // calculate dependents
- Map> dependents = new HashMap<>();
- for (Map.Entry> entry : dependencies.entrySet()) {
- final BuildStepBuilder dependent = entry.getKey();
- for (Produce produce : entry.getValue()) {
- dependents.computeIfAbsent(produce.getStepBuilder(), x -> new HashSet<>()).add(dependent);
- }
- }
- // detect cycles
- cycleCheck(included, new HashSet<>(), new HashSet<>(), dependencies, new ArrayDeque<>());
- // recursively build all
- final Set startSteps = new HashSet<>();
- final Set endSteps = new HashSet<>();
- for (BuildStepBuilder builder : included) {
- buildOne(builder, included, mappedSteps, dependents, dependencies, startSteps, endSteps);
- }
- if (GRAPH_OUTPUT != null && !GRAPH_OUTPUT.isEmpty()) {
- try (FileOutputStream fos = new FileOutputStream(GRAPH_OUTPUT)) {
- try (OutputStreamWriter osw = new OutputStreamWriter(fos, StandardCharsets.UTF_8)) {
- try (BufferedWriter writer = new BufferedWriter(osw)) {
- writer.write("digraph {");
- writer.newLine();
- writer.write(" node [shape=rectangle];");
- writer.newLine();
- writer.write(" rankdir=LR;");
- writer.newLine();
- writer.newLine();
- writer.write(" { rank = same; ");
- for (StepInfo startStep : startSteps) {
- writer.write(quoteString(startStep.getBuildStep().getId()));
- writer.write("; ");
- }
- writer.write("};");
- writer.newLine();
- writer.write(" { rank = same; ");
- for (StepInfo endStep : endSteps) {
- if (!startSteps.contains(endStep)) {
- writer.write(quoteString(endStep.getBuildStep().getId()));
- writer.write("; ");
- }
- }
- writer.write("};");
- writer.newLine();
- writer.newLine();
- final HashSet printed = new HashSet<>();
- for (StepInfo step : startSteps) {
- writeStep(writer, printed, step);
- }
- writer.write("}");
- writer.newLine();
- }
- }
- } catch (IOException ioe) {
- throw new RuntimeException("Failed to write debug graph output", ioe);
- }
- }
- return new BuildChain(initialSingleCount, initialMultiCount, startSteps, consumed, this, endSteps.size());
+ return allProduces;
}
private static void writeStep(final BufferedWriter writer, final HashSet<StepInfo> printed, final StepInfo step)
@@ -322,21 +269,26 @@ private static void writeStep(final BufferedWriter writer, final HashSet<StepInfo> printed, final StepInfo step)
- private void cycleCheck(Set<BuildStepBuilder> builders, Set<BuildStepBuilder> visited, Set<BuildStepBuilder> checked,
+ private void detectCycles(Set<BuildStepBuilder> builders, final Map<BuildStepBuilder, Set<Produce>> dependencies)
+ throws ChainBuildException {
+ detectCycles(builders, new HashSet<>(), new HashSet<>(), dependencies, new ArrayDeque<>());
+ }
+
+ private void detectCycles(Set<BuildStepBuilder> builders, Set<BuildStepBuilder> visited, Set<BuildStepBuilder> checked,
final Map<BuildStepBuilder, Set<Produce>> dependencies, final Deque<Produce> producedPath)
throws ChainBuildException {
for (BuildStepBuilder builder : builders) {
- cycleCheck(builder, visited, checked, dependencies, producedPath);
+ detectCycles(builder, visited, checked, dependencies, producedPath);
}
}
@@ -345,12 +297,12 @@ private void cycleCheckProduce(Set produceSet, Set vi
throws ChainBuildException {
for (Produce produce : produceSet) {
producedPath.add(produce);
- cycleCheck(produce.getStepBuilder(), visited, checked, dependencies, producedPath);
+ detectCycles(produce.getStepBuilder(), visited, checked, dependencies, producedPath);
producedPath.removeLast();
}
}
- private void cycleCheck(BuildStepBuilder builder, Set<BuildStepBuilder> visited, Set<BuildStepBuilder> checked,
+ private void detectCycles(BuildStepBuilder builder, Set<BuildStepBuilder> visited, Set<BuildStepBuilder> checked,
final Map<BuildStepBuilder, Set<Produce>> dependencies, final Deque<Produce> producedPath)
throws ChainBuildException {
if (!checked.contains(builder)) {
@@ -384,41 +336,64 @@ private void cycleCheck(BuildStepBuilder builder, Set visited,
checked.add(builder);
}
- private void addOne(final Map<ItemId, List<Produce>> allProduces, final Set<BuildStepBuilder> included,
- final ArrayDeque<BuildStepBuilder> toAdd, final ItemId idToAdd, Set<Produce> dependencies)
- throws ChainBuildException {
+ private void addItem(final Map<ItemId, List<Produce>> allProduces, final Set<BuildStepBuilder> included,
+ final ArrayDeque<BuildStepBuilder> toAdd, final ItemId idToAdd) {
+ addItem(allProduces, included, toAdd, idToAdd, null);
+ }
+
+ private void addItem(final Map<ItemId, List<Produce>> allProduces, final Set<BuildStepBuilder> included,
+ final ArrayDeque<BuildStepBuilder> toAdd, final ItemId idToAdd, final Set<Produce> dependencies) {
+ boolean modified = produceItem(allProduces, included, toAdd, idToAdd, dependencies, false);
+ if (!modified) {
+ // if nobody has produced this item non-overridably, try again with overridable
+ produceItem(allProduces, included, toAdd, idToAdd, dependencies, true);
+ }
+ }
+
+ private static boolean produceItem(Map<ItemId, List<Produce>> allProduces, Set<BuildStepBuilder> included,
+ ArrayDeque<BuildStepBuilder> toAdd, ItemId idToAdd,
+ Set<Produce> dependencies, boolean overrideable) {
boolean modified = false;
for (Produce produce : allProduces.getOrDefault(idToAdd, Collections.emptyList())) {
final BuildStepBuilder stepBuilder = produce.getStepBuilder();
// if overridable, add in second pass only if this pass didn't add any producers
- if (!produce.getFlags().contains(ProduceFlag.OVERRIDABLE)) {
+ if (overrideable == produce.getFlags().contains(ProduceFlag.OVERRIDABLE)) {
if (!produce.getFlags().contains(ProduceFlag.WEAK)) {
if (included.add(stepBuilder)) {
// recursively add
toAdd.addLast(stepBuilder);
}
}
- dependencies.add(produce);
+ if (dependencies != null) {
+ dependencies.add(produce);
+ }
modified = true;
}
}
- if (modified) {
- // someone has produced this item non-overridably
- return;
+ return modified;
+ }
+
+ private Set<StepInfo> buildAllSteps(Set<BuildStepBuilder> included, Map<BuildStepBuilder, Set<Produce>> dependencies,
+ Set<StepInfo> startSteps) {
+ Map<BuildStepBuilder, Set<BuildStepBuilder>> dependents = calculateDependents(dependencies);
+ final Set<StepInfo> endSteps = new HashSet<>();
+ final Map<BuildStepBuilder, StepInfo> mappedSteps = new HashMap<>();
+ for (BuildStepBuilder builder : included) {
+ buildOne(builder, included, mappedSteps, dependents, dependencies, startSteps, endSteps);
}
- for (Produce produce : allProduces.getOrDefault(idToAdd, Collections.emptyList())) {
- final BuildStepBuilder stepBuilder = produce.getStepBuilder();
- // if overridable, add in this pass only if the first pass didn't add any producers
- if (produce.getFlags().contains(ProduceFlag.OVERRIDABLE)) {
- if (!produce.getFlags().contains(ProduceFlag.WEAK)) {
- if (included.add(stepBuilder)) {
- // recursively add
- toAdd.addLast(stepBuilder);
- }
- }
- dependencies.add(produce);
+ return endSteps;
+ }
+
+ private static Map<BuildStepBuilder, Set<BuildStepBuilder>> calculateDependents(
+ Map<BuildStepBuilder, Set<Produce>> dependencies) {
+ Map<BuildStepBuilder, Set<BuildStepBuilder>> dependents = new HashMap<>();
+ for (Map.Entry<BuildStepBuilder, Set<Produce>> entry : dependencies.entrySet()) {
+ final BuildStepBuilder dependent = entry.getKey();
+ for (Produce produce : entry.getValue()) {
+ dependents.computeIfAbsent(produce.getStepBuilder(), x -> new HashSet<>()).add(dependent);
}
}
+ return dependents;
}
private StepInfo buildOne(BuildStepBuilder toBuild, Set<BuildStepBuilder> included, Map<BuildStepBuilder, StepInfo> mapped,
@@ -475,18 +450,10 @@ void addStep(final BuildStepBuilder stepBuilder, final StackTraceElement[] stack
steps.put(stepBuilder, stackTrace);
}
- BuildStepBuilder getFinalStep() {
- return finalStep;
- }
-
List getProviders() {
return providers;
}
- Map getSteps() {
- return steps;
- }
-
Set<ItemId> getInitialIds() {
return initialIds;
}
@@ -494,4 +461,48 @@ Set getInitialIds() {
Set<ItemId> getFinalIds() {
return finalIds;
}
+
+ private static void outputGraph(Set<StepInfo> startSteps, Set<StepInfo> endSteps) {
+ if (GRAPH_OUTPUT != null && !GRAPH_OUTPUT.isEmpty()) {
+ try (FileOutputStream fos = new FileOutputStream(GRAPH_OUTPUT)) {
+ try (OutputStreamWriter osw = new OutputStreamWriter(fos, StandardCharsets.UTF_8)) {
+ try (BufferedWriter writer = new BufferedWriter(osw)) {
+ writer.write("digraph {");
+ writer.newLine();
+ writer.write(" node [shape=rectangle];");
+ writer.newLine();
+ writer.write(" rankdir=LR;");
+ writer.newLine();
+ writer.newLine();
+ writer.write(" { rank = same; ");
+ for (StepInfo startStep : startSteps) {
+ writer.write(quoteString(startStep.getBuildStep().getId()));
+ writer.write("; ");
+ }
+ writer.write("};");
+ writer.newLine();
+ writer.write(" { rank = same; ");
+ for (StepInfo endStep : endSteps) {
+ if (!startSteps.contains(endStep)) {
+ writer.write(quoteString(endStep.getBuildStep().getId()));
+ writer.write("; ");
+ }
+ }
+ writer.write("};");
+ writer.newLine();
+ writer.newLine();
+ final HashSet<StepInfo> printed = new HashSet<>();
+ for (StepInfo step : startSteps) {
+ writeStep(writer, printed, step);
+ }
+ writer.write("}");
+ writer.newLine();
+ }
+ }
+ } catch (IOException ioe) {
+ throw new RuntimeException("Failed to write debug graph output", ioe);
+ }
+ }
+ }
+
}
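
Aside: the `addItem`/`produceItem` pair above keeps the original two-pass rule: producers that are not marked `OVERRIDABLE` win, and overridable producers are only considered when the first pass found nothing. A minimal, standalone sketch of that selection idea, using an illustrative `Producer` record rather than the real Quarkus builder types:

[source,java]
----
import java.util.List;

public class TwoPassSelection {

    // Illustrative stand-in for a producer that may be marked overridable.
    record Producer(String name, boolean overridable) {
    }

    // Pass 1 keeps only non-overridable producers; if none matched,
    // pass 2 falls back to the overridable ones.
    static List<Producer> select(List<Producer> candidates) {
        List<Producer> preferred = candidates.stream()
                .filter(p -> !p.overridable())
                .toList();
        return preferred.isEmpty()
                ? candidates.stream().filter(Producer::overridable).toList()
                : preferred;
    }

    public static void main(String[] args) {
        List<Producer> candidates = List.of(
                new Producer("default", true),
                new Producer("custom", false));
        // Prints only the non-overridable producer: [Producer[name=custom, overridable=false]]
        System.out.println(select(candidates));
    }
}
----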
diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildContext.java b/core/builder/src/main/java/io/quarkus/builder/BuildContext.java
index a3493fe3b0959..4d2c23ee429f2 100644
--- a/core/builder/src/main/java/io/quarkus/builder/BuildContext.java
+++ b/core/builder/src/main/java/io/quarkus/builder/BuildContext.java
@@ -156,32 +156,6 @@ public List consumeMulti(Class type, Comparator
return result;
}
- /**
- * Determine if an item was produced and is therefore available to be {@linkplain #consume(Class) consumed}.
- *
- * @param type the item type (must not be {@code null})
- * @return {@code true} if the item was produced and is available, {@code false} if it was not or if this deployer does
- * not consume the named item
- */
- public boolean isAvailableToConsume(Class<? extends BuildItem> type) {
- final ItemId id = new ItemId(type);
- return stepInfo.getConsumes().contains(id) && id.isMulti()
- ? !execution.getMultis().getOrDefault(id, Collections.emptyList()).isEmpty()
- : execution.getSingles().containsKey(id);
- }
-
- /**
- * Determine if an item will be consumed in this build. If an item is not consumed, then build steps are not
- * required to produce it.
- *
- * @param type the item type (must not be {@code null})
- * @return {@code true} if the item will be consumed, {@code false} if it will not be or if this deployer does
- * not produce the named item
- */
- public boolean isConsumed(Class<? extends BuildItem> type) {
- return execution.getBuildChain().getConsumed().contains(new ItemId(type));
- }
-
/**
* Emit a build note. This indicates information that the user may be interested in.
*
diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildExecutionBuilder.java b/core/builder/src/main/java/io/quarkus/builder/BuildExecutionBuilder.java
index a2303b83993bd..de947a13fb98d 100644
--- a/core/builder/src/main/java/io/quarkus/builder/BuildExecutionBuilder.java
+++ b/core/builder/src/main/java/io/quarkus/builder/BuildExecutionBuilder.java
@@ -24,8 +24,8 @@ public final class BuildExecutionBuilder {
BuildExecutionBuilder(final BuildChain buildChain, final String buildTargetName) {
this.buildChain = buildChain;
this.buildTargetName = buildTargetName;
- initialSingle = new HashMap<>(buildChain.getInitialSingleCount());
- initialMulti = new HashMap<>(buildChain.getInitialMultiCount());
+ initialSingle = new HashMap<>();
+ initialMulti = new HashMap<>();
}
/**
diff --git a/core/deployment/src/test/java/io/quarkus/deployment/dev/FileSystemWatcherTestCase.java b/core/deployment/src/test/java/io/quarkus/deployment/dev/FileSystemWatcherTestCase.java
index fca90ff59fa21..8ea252fa07e54 100644
--- a/core/deployment/src/test/java/io/quarkus/deployment/dev/FileSystemWatcherTestCase.java
+++ b/core/deployment/src/test/java/io/quarkus/deployment/dev/FileSystemWatcherTestCase.java
@@ -167,8 +167,8 @@ private void checkResult(File file, FileChangeEvent.Type type) throws Interrupte
private FileChangeEvent[] consumeEvents() throws InterruptedException {
FileChangeEvent[] nextEvents = new FileChangeEvent[2];
- Collection<FileChangeEvent> results = this.results.poll(1, TimeUnit.SECONDS);
- Collection<FileChangeEvent> secondResults = this.secondResults.poll(1, TimeUnit.SECONDS);
+ Collection<FileChangeEvent> results = this.results.poll(3, TimeUnit.SECONDS);
+ Collection<FileChangeEvent> secondResults = this.secondResults.poll(3, TimeUnit.SECONDS);
Assertions.assertNotNull(results);
Assertions.assertNotNull(secondResults);
Assertions.assertEquals(1, results.size());
diff --git a/docs/src/main/asciidoc/config-yaml.adoc b/docs/src/main/asciidoc/config-yaml.adoc
index 4755ae64e36df..9a84dcfb9f515 100644
--- a/docs/src/main/asciidoc/config-yaml.adoc
+++ b/docs/src/main/asciidoc/config-yaml.adoc
@@ -1,28 +1,28 @@
////
-This guide is maintained in the main Quarkus repository
-and pull requests should be submitted there:
+This guide is maintained in the main Quarkus repository, and pull requests should be submitted there:
https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc
////
-= YAML Configuration
+[id="config-yaml"]
+= YAML configuration
include::_attributes.adoc[]
+:diataxis-type: howto
:categories: core
-:summary: YAML as a Configuration Source.
+:summary: Optionally, use `application.yaml` instead of `application.properties` to configure your application.
:topics: configuration
:extensions: io.quarkus:quarkus-config-yaml
-https://en.wikipedia.org/wiki/YAML[YAML] is a very popular format. Kubernetes relies heavily on the YAML format to
-write the various resource descriptors.
+You can use a YAML file, `application.yaml`, to configure your {project-name} application instead of the standard Java properties file, `application.properties`.
-Quarkus offers the possibility to use YAML in addition to the standard Java Properties file.
+link:https://en.wikipedia.org/wiki/YAML[YAML] is widely used for defining resource descriptors, especially in Kubernetes.
-== Enabling YAML Configuration
+== Enable YAML configuration
To enable YAML configuration, add the `quarkus-config-yaml` extension:
:add-extension-extensions: quarkus-config-yaml
include::{includes}/devtools/extension-add.adoc[]
-You can also just add the following dependency into your project:
+Alternatively, add the following dependency to your project:
[source,xml,role="primary asciidoc-tabs-target-sync-cli asciidoc-tabs-target-sync-maven"]
.pom.xml
@@ -39,16 +39,15 @@ You can also just add the following dependency into your project:
implementation("io.quarkus:quarkus-config-yaml")
----
-Remove the `src/main/resources/application.properties` and create a `src/main/resources/application.yaml` file.
+After adding the extension or dependency, to avoid confusion, remove the `src/main/resources/application.properties` file and create a `src/main/resources/application.yaml` file.
-NOTE: If both are present, Quarkus prioritizes configuration properties from the YAML file first and then from the
-Properties file. However, to avoid confusion, we recommend removing the Properties file.
+NOTE: If both files are present, {project-name} gives precedence to properties in the YAML file.
-TIP: Quarkus supports both the `yml` and `yaml` file extensions.
+TIP: {project-name} recognizes both `.yml` and `.yaml` file extensions.
-=== Example
+=== Example YAML configurations
-The following snippets provide examples of YAML configuration:
+The following snippets give examples of YAML configurations:
[source,yaml]
----
@@ -113,13 +112,18 @@ app:
== Profiles
-As you can see in the previous snippet, you can use xref:config-reference.adoc#profiles[profiles] in YAML. The profile
-key requires double quotes: `"%test"`. This is because YAML does not support keys starting with `%`.
+As you can see in the previous snippet, you can use xref:{doc-guides}/config-reference.adoc#profiles[profiles] in YAML.
-Everything under the `"%test"` key is only enabled when the `test` profile is active. For example, in the previous
-snippet it disables OIDC (`quarkus.oidc.enabled: false`), whereas without the `test` profile, it would be enabled.
-As for the Java Properties format, you can define your own profile:
+In YAML, keys that begin with `%` are not allowed.
+However, profile keys must start with this symbol.
+To resolve this, enclose the profile keys in double quotes, as demonstrated by the example, `"%test"`.
+
+All configurations under the `"%test"` key activate only when the `test` profile is enabled.
+For instance, the previous snippet shows that OpenID Connect (OIDC) (`quarkus.oidc.enabled: false`) is disabled when the `test` profile is active.
+Without the `test` profile, OIDC is enabled by default.
+
+You can also define custom profiles, such as `%staging` in the following example:
[source, yaml]
----
@@ -133,12 +137,12 @@ quarkus:
port: 8082
----
-If you enable the `staging` profile, the HTTP port will be 8082, whereas it would be 8081 otherwise.
+If you enable the `staging` profile, the HTTP port is set to `8082` instead of `8081`.
-The YAML configuration also support profile aware files. In this case, properties for a specific profile may reside in
-an `application-{profile}.yaml` named file. The previous example may be expressed as:
+The YAML configuration also supports profile-aware files.
+In this case, properties for a specific profile can reside in an `application-{profile}.yaml` named file.
+The previous example can be expressed as:
-.application.yaml
[source, yaml]
----
quarkus:
@@ -156,8 +160,7 @@ quarkus:
== Expressions
-The YAML format also supports xref:config-reference.adoc#property-expressions[property expressions], using the same format as Java
-Properties:
+The YAML format also supports xref:config-reference.adoc#property-expressions[property expressions], by using the same format as Java properties:
[source, yaml]
----
@@ -172,12 +175,12 @@ display:
factor: ${x.factor}
----
-Note that you can reference nested properties using the `.` (dot) separator as in `${x.factor}`.
+You can reference nested properties by using the `.` (dot) separator, as in `${x.factor}`.
== External application.yaml file
-The `application.yaml` file may also be placed in `config/application.yaml` to specialize the runtime configuration. The
-file has to be present in the root of the working directory relative to the Quarkus application runner:
+The `application.yaml` file can also be placed in `config/application.yaml` to specialize the runtime configuration.
+The file must be present in the root of the working directory relative to the {project-name} application runner:
[source, text]
----
@@ -187,16 +190,16 @@ file has to be present in the root of the working directory relative to the Quar
├── my-app-runner
----
-The values from this file override any values from the regular `application.yaml` file if exists.
+The values from this file override any values from the regular `application.yaml` file if it exists.
== Configuration key conflicts
-The MicroProfile Config specification defines configuration keys as an arbitrary `.`-delimited string. However,
-structured formats like YAML only support a subset of the possible configuration namespace. For example, consider the
-two configuration properties `quarkus.http.cors` and `quarkus.http.cors.methods`. One property is the prefix of another,
-so it may not be immediately evident how to specify both keys in your YAML configuration.
+The MicroProfile Config specification defines configuration keys as an arbitrary `.`-delimited string.
+However, structured formats such as YAML only support a subset of the possible configuration namespace.
+For example, consider the two configuration properties `quarkus.http.cors` and `quarkus.http.cors.methods`.
+One property is the prefix of another, so it might not be immediately evident how to specify both keys in your YAML configuration.
-This is solved by using a `null` key (represented by `~`) for any YAML property which is a prefix of another one:
+This is solved by using `~` as a `null` key to represent any YAML property that is a prefix of another one:
[source,yaml]
----
@@ -207,5 +210,4 @@ quarkus:
methods: GET,PUT,POST
----
-YAML `null` keys are not included in the assembly of the configuration property name, allowing them to be used
-in any level for disambiguating configuration keys.
+YAML `null` keys are not included in the assembly of the configuration property name, allowing them to be used at any level for disambiguating configuration keys.
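
Whichever file format is used, the keys resolve to ordinary MicroProfile Config properties and are consumed from code in the same way. A minimal sketch, assuming a property such as `greeting.message` is defined in `application.yaml` (the property and class names are illustrative):

[source,java]
----
package org.acme.config;

import jakarta.enterprise.context.ApplicationScoped;

import org.eclipse.microprofile.config.inject.ConfigProperty;

@ApplicationScoped
public class GreetingService {

    // Resolved the same way whether the value comes from
    // application.yaml or application.properties.
    @ConfigProperty(name = "greeting.message", defaultValue = "hello")
    String message;

    public String greet() {
        return message;
    }
}
----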
diff --git a/docs/src/main/asciidoc/config.adoc b/docs/src/main/asciidoc/config.adoc
index 5ff455899013c..4ef6b3cd9b262 100644
--- a/docs/src/main/asciidoc/config.adoc
+++ b/docs/src/main/asciidoc/config.adoc
@@ -133,6 +133,25 @@ public String hello() {
TIP: Use `@io.smallrye.config.ConfigMapping` annotation to group multiple configurations in a single interface. Please,
check the https://smallrye.io/smallrye-config/Main/config/mappings/[Config Mappings] documentation.
+[[secrets-in-environment-properties]]
+== Store secrets in an environment properties file
+
+A secret (such as a password, a personal access token, or an API key) must not end up in version control
+for security reasons. One way to keep secrets out of version control is to store them in a local environment properties (`.env`) file:
+
+. Store the secret in the `.env` file in the project root directory.
++
+.The .env file
+[source,properties]
+----
+foo.api-key=ThisIsSecret
+----
+
+. Add the `.env` file to `.gitignore`.
+
+`mvn quarkus:dev` automatically picks up the properties in the `.env` file,
+similarly to those in the `application.properties` file.
+
== Update the test
We also need to update the functional test to reflect the changes made to the endpoint.
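
The `foo.api-key` entry from the `.env` file described above is resolved like any other configuration property, so application code never needs to read the file directly. A minimal sketch, with illustrative class and member names:

[source,java]
----
package org.acme.config;

import jakarta.enterprise.context.ApplicationScoped;

import org.eclipse.microprofile.config.inject.ConfigProperty;

@ApplicationScoped
public class ApiClient {

    // In dev mode the value comes from the .env file, so the secret
    // stays out of application.properties and version control.
    @ConfigProperty(name = "foo.api-key")
    String apiKey;

    public String maskedKey() {
        return apiKey.isEmpty() ? "" : apiKey.charAt(0) + "***";
    }
}
----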
diff --git a/docs/src/main/asciidoc/grpc-generation-reference.adoc b/docs/src/main/asciidoc/grpc-generation-reference.adoc
index 6dc05db4db955..6d871816c3c33 100644
--- a/docs/src/main/asciidoc/grpc-generation-reference.adoc
+++ b/docs/src/main/asciidoc/grpc-generation-reference.adoc
@@ -1,4 +1,10 @@
+////
+This guide is maintained in the main Quarkus repository
+and pull requests should be submitted there:
+https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc
+////
= gRPC code generation reference guide
+include::_attributes.adoc[]
:categories: Serialization
:diataxis-type: Reference
:summary: Learn how to configure gRPC code generation.
@@ -15,7 +21,7 @@ By default, `\*.proto` files located in the `src/main/proto` directory are compi
To enable gRPC code generation, add the following dependency to your project:
-[source,xml,subs="attributes"]
+[source,xml,subs=attributes+]
----
io.quarkus
@@ -25,7 +31,7 @@ To enable gRPC code generation, add the following dependency to your project:
Next, ensure that the `generate-code` phase is enabled in the Quarkus Maven plugin:
-[source,xml,subs="attributes"]
+[source,xml,subs=attributes+]
----
${quarkus.platform.group-id}
@@ -48,7 +54,7 @@ Next, ensure that the `generate-code` phase is enabled in the Quarkus Maven plug
For Gradle, add the following dependency to your project:
-[source,gradle,subs="attributes"]
+[source,gradle,subs=attributes+]
----
implementation 'io.quarkus:quarkus-grpc'
----
@@ -60,7 +66,7 @@ You can configure this location using the `quarkus.grpc.codegen.proto-directory`
With Maven, add the following configuration:
-[source,xml,subs="attributes"]
+[source,xml,subs=attributes+]
----
${quarkus.platform.group-id}
@@ -86,7 +92,7 @@ With Maven, add the following configuration:
With Gradle, use the following configuration:
-[source,gradle,subs="attributes"]
+[source,gradle,subs=attributes+]
----
quarkus {
quarkusBuildProperties.put("quarkus.grpc.codegen.proto-directory", "${project.projectDir}/ext/proto")
@@ -159,7 +165,7 @@ However, it's recommended to use Quarkus support unless you have a specific need
To do this, define the following properties in the `` section:
-[source,xml,subs="attributes"]
+[source,xml,subs=attributes+]
----
{grpc-version}{protoc-version}
@@ -235,7 +241,7 @@ When gRPC classes, which are classes generated from `proto` files, are in a depe
You can create a Jandex index using the `jandex-maven-plugin`.
More information on this topic can be found in the xref:cdi-reference.adoc#bean_discovery[Bean Discovery] section of the CDI guide.
-[source,xml,subs="attributes+"]
+[source,xml,subs=attributes+]
----
@@ -258,7 +264,7 @@ More information on this topic can be found in the xref:cdi-reference.adoc#bean_
If you are using Gradle, you can use the following configuration:
-[source, gradle, subs="attributes+"]
+[source, gradle, subs=attributes+]
----
plugins {
id 'org.kordamp.gradle.jandex' version '1.1.0'
@@ -266,4 +272,4 @@ plugins {
----
IMPORTANT: It is recommended to package the `proto` files in a dependency instead of the generated classes, so Quarkus can generate optimized classes.
-Refer to the <> for more information.
\ No newline at end of file
+Refer to the <> for more information.
diff --git a/docs/src/main/asciidoc/grpc-getting-started.adoc b/docs/src/main/asciidoc/grpc-getting-started.adoc
index e8aaa90d7eab4..7d4b834f150a5 100644
--- a/docs/src/main/asciidoc/grpc-getting-started.adoc
+++ b/docs/src/main/asciidoc/grpc-getting-started.adoc
@@ -101,7 +101,7 @@ You can also download the suitable binary and specify the location via
Alternatively to using the `generate-code` goal of the `quarkus-maven-plugin`, you can use `protobuf-maven-plugin` to generate these files.
-See the <> section for more information.
+See the <> section for more information.
Let's start with a simple _Hello_ service.
Create the `src/main/proto/helloworld.proto` file with the following content:
diff --git a/docs/src/main/asciidoc/reactive-event-bus.adoc b/docs/src/main/asciidoc/reactive-event-bus.adoc
index 8d6197323bab1..b1c03416d0770 100644
--- a/docs/src/main/asciidoc/reactive-event-bus.adoc
+++ b/docs/src/main/asciidoc/reactive-event-bus.adoc
@@ -26,7 +26,7 @@ However, it is limited to single-event behavior (no stream) and to local message
== Installing
This mechanism uses the Vert.x EventBus, so you need to enable the `vertx` extension to use this feature.
-If you are creating a new project, set the `extensions` parameter are follows:
+If you are creating a new project, set the `extensions` parameter as follows:
:create-app-artifact-id: vertx-quickstart
:create-app-extensions: vertx,resteasy-reactive
diff --git a/docs/src/main/asciidoc/security-jpa.adoc b/docs/src/main/asciidoc/security-jpa.adoc
index 6340b379c0403..e93fd321100fb 100644
--- a/docs/src/main/asciidoc/security-jpa.adoc
+++ b/docs/src/main/asciidoc/security-jpa.adoc
@@ -185,6 +185,8 @@ For applications running in a production environment, do not store passwords as
However, it is possible to store passwords as plain text with the `@Password(PasswordType.CLEAR)` annotation when operating in a test environment.
====
+include::{generated-dir}/config/quarkus-security-jpa.adoc[opts=optional, leveloffset=+2]
+
== References
* xref:security-getting-started-tutorial.adoc[Getting Started with Security using Basic authentication and Jakarta Persistence]
diff --git a/docs/src/main/asciidoc/security-oidc-bearer-token-authentication.adoc b/docs/src/main/asciidoc/security-oidc-bearer-token-authentication.adoc
index 1cde2425bc9f4..b6d397bbb3d72 100644
--- a/docs/src/main/asciidoc/security-oidc-bearer-token-authentication.adoc
+++ b/docs/src/main/asciidoc/security-oidc-bearer-token-authentication.adoc
@@ -1023,6 +1023,92 @@ If you set `quarkus.oidc.client-id` but your endpoint does not require remote ac
Note Quarkus `web-app` applications always require `quarkus.oidc.client-id` property.
+== Authentication after HTTP request has completed
+
+Sometimes, `SecurityIdentity` for a given token must be created when there is no active HTTP request context.
+The `quarkus-oidc` extension provides `io.quarkus.oidc.TenantIdentityProvider` to convert a token to a `SecurityIdentity` instance.
+For example, one situation when you must verify the token after the HTTP request has completed is when you are
+processing messages with the xref:vertx.adoc#event-bus[Vert.x event bus].
+In the example below, the 'product-order' message is consumed within a different CDI request context; therefore,
+an injected `SecurityIdentity` would not correctly represent the verified identity and would be anonymous.
+
+[source,java]
+----
+package org.acme.quickstart.oidc;
+
+import static jakarta.ws.rs.core.HttpHeaders.AUTHORIZATION;
+
+import jakarta.inject.Inject;
+import jakarta.ws.rs.HeaderParam;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import io.vertx.core.eventbus.EventBus;
+
+@Path("order")
+public class OrderResource {
+
+ @Inject
+ EventBus eventBus;
+
+ @POST
+ public void order(String product, @HeaderParam(AUTHORIZATION) String bearer) {
+ String rawToken = bearer.substring("Bearer ".length()); <1>
+ eventBus.publish("product-order", new Product(product, 1, rawToken));
+ }
+}
+----
+<1> At this point, the token is not verified when proactive authentication is disabled.
+
+[source,java]
+----
+package org.acme.quickstart.oidc;
+
+import jakarta.enterprise.context.ApplicationScoped;
+import jakarta.inject.Inject;
+
+import io.quarkus.oidc.AccessTokenCredential;
+import io.quarkus.oidc.TenantFeature;
+import io.quarkus.oidc.TenantIdentityProvider;
+import io.quarkus.security.identity.SecurityIdentity;
+import io.quarkus.vertx.ConsumeEvent;
+import io.smallrye.common.annotation.Blocking;
+
+@ApplicationScoped
+public class OrderService {
+
+ @TenantFeature("tenantId")
+ @Inject
+ TenantIdentityProvider identityProvider;
+
+ @Inject
+ TenantIdentityProvider defaultIdentityProvider; <1>
+
+ @Blocking
+ @ConsumeEvent("product-order")
+ void processOrder(Product product) {
+ String rawToken = product.customerAccessToken;
+ AccessTokenCredential token = new AccessTokenCredential(rawToken);
+ SecurityIdentity securityIdentity = identityProvider.authenticate(token).await().indefinitely(); <2>
+ ...
+ }
+
+}
+----
+<1> For default tenant, the `TenantFeature` qualifier is optional.
+<2> Executes token verification and converts the token to a `SecurityIdentity`.
+
+[NOTE]
+====
+When the provider is used during an HTTP request, the tenant configuration can be resolved as described in
+the xref:security-openid-connect-multitenancy.adoc[Using OpenID Connect Multi-Tenancy] guide.
+However, when there is no active HTTP request, you need to select the tenant explicitly with the `io.quarkus.oidc.TenantFeature` qualifier.
+====
+
+[WARNING]
+====
+xref:security-openid-connect-multitenancy.adoc#tenant-config-resolver[Dynamic tenant configuration resolution] is currently not supported.
+Authentication that requires a dynamic tenant will fail.
+====
+
== References
* xref:security-oidc-configuration-properties-reference.adoc[OIDC configuration properties]
@@ -1038,3 +1124,4 @@ Note Quarkus `web-app` applications always require `quarkus.oidc.client-id` prop
* xref:security-authentication-mechanisms.adoc#combining-authentication-mechanisms[Combining authentication mechanisms]
* xref:security-overview.adoc[Quarkus Security overview]
* xref:security-keycloak-admin-client.adoc[Quarkus Keycloak Admin Client]
+* xref:security-openid-connect-multitenancy.adoc[Using OpenID Connect Multi-Tenancy]
diff --git a/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc b/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc
index 02d5f11b14328..c405cabb19516 100644
--- a/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc
+++ b/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc
@@ -225,7 +225,7 @@ For more information, see xref:security-oidc-bearer-token-authentication.adoc#in
[[keycloak-initialization]]
=== Keycloak Initialization
-The `quay.io/keycloak/keycloak:22.0.4` image which contains a Keycloak distribution powered by Quarkus is used to start a container by default.
+The `quay.io/keycloak/keycloak:22.0.5` image, which contains a Keycloak distribution powered by Quarkus, is used to start a container by default.
`quarkus.keycloak.devservices.image-name` can be used to change the Keycloak image name. For example, set it to `quay.io/keycloak/keycloak:19.0.3-legacy` to use a Keycloak distribution powered by WildFly.
Note that only a Quarkus based Keycloak distribution is available starting from Keycloak `20.0.0`.
diff --git a/docs/src/main/asciidoc/security-overview.adoc b/docs/src/main/asciidoc/security-overview.adoc
index 3b3c0cafa1314..9e26731e6d328 100644
--- a/docs/src/main/asciidoc/security-overview.adoc
+++ b/docs/src/main/asciidoc/security-overview.adoc
@@ -78,6 +78,11 @@ Secrets engines are components that store, generate, or encrypt data.
Quarkus provides comprehensive HashiCorp Vault support.
For more information, see the link:{vault-guide}[Quarkus and HashiCorp Vault] documentation.
+== Secrets in environment properties
+
+Quarkus supports storing secrets in environment properties.
+For more information, see xref:config.adoc#secrets-in-environment-properties[Store secrets in an environment properties file].
+
[[secure-serialization]]
=== Secure serialization
diff --git a/docs/src/main/asciidoc/vertx-reference.adoc b/docs/src/main/asciidoc/vertx-reference.adoc
index fdcc107d726fb..eed2eadb4d316 100644
--- a/docs/src/main/asciidoc/vertx-reference.adoc
+++ b/docs/src/main/asciidoc/vertx-reference.adoc
@@ -854,7 +854,7 @@ So use SockJS, you need to configure the bridge, especially the addresses that w
[source, java]
----
-package org.acme.vertx;
+package org.acme;
import io.vertx.core.Vertx;
import io.vertx.ext.bridge.PermittedOptions;
@@ -875,9 +875,13 @@ public class SockJsExample {
public void init(@Observes Router router) {
SockJSHandler sockJSHandler = SockJSHandler.create(vertx);
- sockJSHandler.bridge(new SockJSBridgeOptions()
+ Router bridge = sockJSHandler.bridge(new SockJSBridgeOptions()
.addOutboundPermitted(new PermittedOptions().setAddress("ticks")));
- router.route("/eventbus/*").handler(sockJSHandler);
+ router.route("/eventbus/*").subRouter(bridge);
+
+ AtomicInteger counter = new AtomicInteger();
+ vertx.setPeriodic(1000,
+ ignored -> vertx.eventBus().publish("ticks", counter.getAndIncrement()));
}
}
diff --git a/docs/src/main/asciidoc/vertx.adoc b/docs/src/main/asciidoc/vertx.adoc
index c5e08b8f5e43d..09fb4a2a4b706 100644
--- a/docs/src/main/asciidoc/vertx.adoc
+++ b/docs/src/main/asciidoc/vertx.adoc
@@ -274,6 +274,7 @@ workbag.”
...
----
+[[event-bus]]
== Using the event bus
One of the core features of Vert.x is the https://vertx.io/docs/vertx-core/java/#event_bus[event bus].
diff --git a/extensions/elasticsearch-rest-client-common/deployment/src/main/java/io/quarkus/elasticsearch/restclient/common/deployment/DevServicesElasticsearchProcessor.java b/extensions/elasticsearch-rest-client-common/deployment/src/main/java/io/quarkus/elasticsearch/restclient/common/deployment/DevServicesElasticsearchProcessor.java
index b1dc0461f5164..065acd241b0c0 100644
--- a/extensions/elasticsearch-rest-client-common/deployment/src/main/java/io/quarkus/elasticsearch/restclient/common/deployment/DevServicesElasticsearchProcessor.java
+++ b/extensions/elasticsearch-rest-client-common/deployment/src/main/java/io/quarkus/elasticsearch/restclient/common/deployment/DevServicesElasticsearchProcessor.java
@@ -268,7 +268,11 @@ private GenericContainer> createOpensearchContainer(ElasticsearchDevServicesBu
container.addEnv("bootstrap.memory_lock", "true");
container.addEnv("plugins.index_state_management.enabled", "false");
-
+ // Disable disk-based shard allocation thresholds: on large, relatively full disks (>90% used),
+ // it can cause index creation to get stuck waiting for other nodes to join the cluster,
+ // which will never happen since we only have one node.
+ // See https://opensearch.org/docs/latest/api-reference/cluster-api/cluster-settings/
+ container.addEnv("cluster.routing.allocation.disk.threshold_enabled", "false");
container.addEnv("OPENSEARCH_JAVA_OPTS", config.javaOpts);
return container;
}
diff --git a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/JpaJandexScavenger.java b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/JpaJandexScavenger.java
index 198fd9a5ac411..3c23b0b14dc25 100644
--- a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/JpaJandexScavenger.java
+++ b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/JpaJandexScavenger.java
@@ -33,6 +33,7 @@
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.AnnotationTarget;
import org.jboss.jandex.ClassInfo;
+import org.jboss.jandex.Declaration;
import org.jboss.jandex.DotName;
import org.jboss.jandex.FieldInfo;
import org.jboss.jandex.IndexView;
@@ -317,10 +318,17 @@ private void enlistEmbeddedsAndElementCollections(Collector collector) throws Bu
switch (target.kind()) {
case FIELD:
- collectEmbeddedType(embeddedTypes, target.asField().type(), true);
+ var field = target.asField();
+ collectEmbeddedType(embeddedTypes, field.declaringClass(), field, field.type(), true);
break;
case METHOD:
- collectEmbeddedType(embeddedTypes, target.asMethod().returnType(), true);
+ var method = target.asMethod();
+ if (method.isBridge()) {
+ // Generated by javac for covariant return type override.
+ // There's another method with a more specific return type, ignore this one.
+ continue;
+ }
+ collectEmbeddedType(embeddedTypes, method.declaringClass(), method, method.returnType(), true);
break;
default:
throw new IllegalStateException(
@@ -335,10 +343,17 @@ private void enlistEmbeddedsAndElementCollections(Collector collector) throws Bu
switch (target.kind()) {
case FIELD:
- collectElementCollectionTypes(embeddedTypes, target.asField().type());
+ var field = target.asField();
+ collectElementCollectionTypes(embeddedTypes, field.declaringClass(), field, field.type());
break;
case METHOD:
- collectElementCollectionTypes(embeddedTypes, target.asMethod().returnType());
+ var method = target.asMethod();
+ if (method.isBridge()) {
+ // Generated by javac for covariant return type override.
+ // There's another method with a more specific return type, ignore this one.
+ continue;
+ }
+ collectElementCollectionTypes(embeddedTypes, method.declaringClass(), method, method.returnType());
break;
default:
throw new IllegalStateException(
@@ -495,15 +510,16 @@ private static void collectModelType(Collector collector, ClassInfo modelClass)
}
}
- private void collectEmbeddedType(Set<DotName> embeddedTypes, Type embeddedType, boolean validate)
+ private void collectEmbeddedType(Set<DotName> embeddedTypes, ClassInfo declaringClass,
+ Declaration attribute, Type attributeType, boolean validate)
throws BuildException {
DotName className;
- switch (embeddedType.kind()) {
+ switch (attributeType.kind()) {
case CLASS:
- className = embeddedType.asClassType().name();
+ className = attributeType.asClassType().name();
break;
case PARAMETERIZED_TYPE:
- className = embeddedType.name();
+ className = attributeType.name();
break;
default:
// do nothing
@@ -511,28 +527,31 @@ private void collectEmbeddedType(Set embeddedTypes, Type embeddedType,
}
if (validate && !index.getClassByName(className).hasAnnotation(ClassNames.EMBEDDABLE)) {
throw new BuildException(
- className + " is used as an embeddable but does not have an @Embeddable annotation.");
+ "Type " + className + " must be annotated with @Embeddable, because it is used as an embeddable."
+ + " This type is used in class " + declaringClass
+ + " for attribute " + attribute + ".");
}
- embeddedTypes.add(embeddedType.name());
+ embeddedTypes.add(attributeType.name());
}
- private void collectElementCollectionTypes(Set<DotName> embeddedTypes, Type indexType)
+ private void collectElementCollectionTypes(Set<DotName> embeddedTypes, ClassInfo declaringClass,
+ Declaration attribute, Type attributeType)
throws BuildException {
- switch (indexType.kind()) {
+ switch (attributeType.kind()) {
case CLASS:
// Raw collection type, nothing we can do
break;
case PARAMETERIZED_TYPE:
- embeddedTypes.add(indexType.name());
- var typeArguments = indexType.asParameterizedType().arguments();
+ embeddedTypes.add(attributeType.name());
+ var typeArguments = attributeType.asParameterizedType().arguments();
for (Type typeArgument : typeArguments) {
// We don't validate @Embeddable annotations on element collections at the moment
// See https://github.com/quarkusio/quarkus/pull/35822
- collectEmbeddedType(embeddedTypes, typeArgument, false);
+ collectEmbeddedType(embeddedTypes, declaringClass, attribute, typeArgument, false);
}
break;
case ARRAY:
- collectEmbeddedType(embeddedTypes, indexType.asArrayType().constituent(), true);
+ collectEmbeddedType(embeddedTypes, declaringClass, attribute, attributeType.asArrayType().constituent(), true);
break;
default:
// do nothing
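
Background for the `method.isBridge()` checks added above: overriding a getter with a covariant (more specific) return type makes `javac` emit a synthetic bridge method whose return type is still the supertype's, so the scavenger skips it and keeps only the method with the specific return type. A minimal, self-contained illustration (the class names are made up):

[source,java]
----
import java.lang.reflect.Method;

public class BridgeMethodDemo {

    interface Identifiable {
        Object getId();
    }

    static class Entity implements Identifiable {
        @Override
        public String getId() { // covariant override: String instead of Object
            return "42";
        }
    }

    public static void main(String[] args) {
        // Prints two getId() methods: the declared one returning String (bridge=false)
        // and the compiler-generated one returning Object (bridge=true).
        for (Method m : Entity.class.getDeclaredMethods()) {
            System.out.println(m.getName() + " -> " + m.getReturnType().getSimpleName()
                    + " (bridge=" + m.isBridge() + ")");
        }
    }
}
----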
diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/enhancer/HibernateEntityEnhancerMissingEmbeddableAnnotationTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/enhancer/HibernateEntityEnhancerMissingEmbeddableAnnotationTest.java
index f880a2193949a..860a8676d83f9 100644
--- a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/enhancer/HibernateEntityEnhancerMissingEmbeddableAnnotationTest.java
+++ b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/enhancer/HibernateEntityEnhancerMissingEmbeddableAnnotationTest.java
@@ -39,8 +39,10 @@ class HibernateEntityEnhancerMissingEmbeddableAnnotationTest {
.assertException(ex -> assertThat(ex)
.isNotNull()
.hasMessageContainingAll(
- EntityWithEmbedded.EmbeddableMissingAnnotation.class.getName(),
- "is used as an embeddable but does not have an @Embeddable annotation"));
+ "Type " + EntityWithEmbedded.EmbeddableMissingAnnotation.class.getName(),
+ "must be annotated with @Embeddable, because it is used as an embeddable",
+ "This type is used in class " + EntityWithEmbedded.EmbeddableWithAnnotation.class.getName(),
+ "for attribute ", "embeddableMissingAnnotation"));
// Just test that the embedded non-ID works correctly over a persist/retrieve cycle
@Test
diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/enhancer/HibernateEntityEnhancerPresentEmbeddableTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/enhancer/HibernateEntityEnhancerPresentEmbeddableTest.java
index 75093df783891..789b88ff5fb32 100644
--- a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/enhancer/HibernateEntityEnhancerPresentEmbeddableTest.java
+++ b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/enhancer/HibernateEntityEnhancerPresentEmbeddableTest.java
@@ -11,6 +11,7 @@
import jakarta.persistence.ElementCollection;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Embedded;
+import jakarta.persistence.EmbeddedId;
import jakarta.persistence.Entity;
import jakarta.persistence.EntityManager;
import jakarta.persistence.GeneratedValue;
@@ -31,6 +32,9 @@
* Checks that the missing @Embeddable check doesn't mistakenly report
* types that are annotated with @Embeddable (https://github.com/quarkusio/quarkus/issues/35598)
* or generic type parameters on @Embedded field types (https://github.com/quarkusio/quarkus/issues/36065)
+ * or overridden getters annotated with @EmbeddedId/@Embedded where the supertype getter returns a type not annotated
+ * with @Embeddable
+ * (https://github.com/quarkusio/quarkus/issues/36421).
*/
public class HibernateEntityEnhancerPresentEmbeddableTest {
@@ -41,7 +45,9 @@ public class HibernateEntityEnhancerPresentEmbeddableTest {
.addClasses(EntityWithEmbedded.class, EmbeddableWithAnnotation.class,
ExtendedEmbeddableWithAnnotation.class,
NestingEmbeddableWithAnnotation.class,
- GenericEmbeddableWithAnnotation.class))
+ GenericEmbeddableWithAnnotation.class,
+ EntityWithEmbeddedId.class, EntityWithEmbeddedIdAndOverriddenGetter.class,
+ EmbeddableIdWithAnnotation.class))
.withConfigurationResource("application.properties")
.overrideConfigKey("quarkus.hibernate-orm.implicit-naming-strategy", "component-path");
@@ -50,7 +56,7 @@ public class HibernateEntityEnhancerPresentEmbeddableTest {
// Just test that the generic embeddeds work correctly over a persist/retrieve cycle
@Test
- public void smokeTest() {
+ public void embedded_smokeTest() {
Long id = QuarkusTransaction.requiringNew().call(() -> {
EntityWithEmbedded entity = new EntityWithEmbedded();
entity.setName("name");
@@ -97,6 +103,36 @@ public void smokeTest() {
});
}
+ // Just test that the embeddedIds work correctly over a persist/retrieve cycle
+ @Test
+ public void embeddedId_smokeTest() {
+ QuarkusTransaction.requiringNew().run(() -> {
+ EntityWithEmbeddedId entity1 = new EntityWithEmbeddedId();
+ entity1.setId(new EmbeddableIdWithAnnotation("1"));
+ em.persist(entity1);
+ });
+
+ QuarkusTransaction.requiringNew().run(() -> {
+ EntityWithEmbeddedId entity = em.find(EntityWithEmbeddedId.class, new EmbeddableIdWithAnnotation("1"));
+ assertThat(entity).isNotNull();
+ });
+ }
+
+ @Test
+ public void embeddedIdAndOverriddenGetter_smokeTest() {
+ QuarkusTransaction.requiringNew().run(() -> {
+ EntityWithEmbeddedIdAndOverriddenGetter entity1 = new EntityWithEmbeddedIdAndOverriddenGetter();
+ entity1.setId(new EmbeddableIdWithAnnotation("2"));
+ em.persist(entity1);
+ });
+
+ QuarkusTransaction.requiringNew().run(() -> {
+ EntityWithEmbeddedIdAndOverriddenGetter entity = em.find(EntityWithEmbeddedIdAndOverriddenGetter.class,
+ new EmbeddableIdWithAnnotation("2"));
+ assertThat(entity).isNotNull();
+ });
+ }
+
@Entity
public static class EntityWithEmbedded {
@@ -290,4 +326,55 @@ public void setValue(T value) {
}
}
+ @Entity
+ public static class EntityWithEmbeddedId {
+ @EmbeddedId
+ private EmbeddableIdWithAnnotation id;
+
+ public EmbeddableIdWithAnnotation getId() {
+ return id;
+ }
+
+ public void setId(EmbeddableIdWithAnnotation id) {
+ this.id = id;
+ }
+ }
+
+ @MappedSuperclass
+ public interface Identifiable {
+ Object getId();
+ }
+
+ @Entity
+ public static class EntityWithEmbeddedIdAndOverriddenGetter implements Identifiable {
+ private EmbeddableIdWithAnnotation id;
+
+ @Override
+ @EmbeddedId
+ public EmbeddableIdWithAnnotation getId() {
+ return id;
+ }
+
+ public void setId(EmbeddableIdWithAnnotation id) {
+ this.id = id;
+ }
+ }
+
+ @Embeddable
+ public static class EmbeddableIdWithAnnotation {
+ private String text;
+
+ protected EmbeddableIdWithAnnotation() {
+ // For Hibernate ORM only - it will change the property value through reflection
+ }
+
+ public EmbeddableIdWithAnnotation(String text) {
+ this.text = text;
+ }
+
+ public String getText() {
+ return text;
+ }
+ }
+
}
diff --git a/extensions/oidc-db-token-state-manager/deployment/src/test/java/io/quarkus/oidc/db/token/state/manager/MySqlDbTokenStateManagerTest.java b/extensions/oidc-db-token-state-manager/deployment/src/test/java/io/quarkus/oidc/db/token/state/manager/MySqlDbTokenStateManagerTest.java
index 3a729ac393a78..f887874247e99 100644
--- a/extensions/oidc-db-token-state-manager/deployment/src/test/java/io/quarkus/oidc/db/token/state/manager/MySqlDbTokenStateManagerTest.java
+++ b/extensions/oidc-db-token-state-manager/deployment/src/test/java/io/quarkus/oidc/db/token/state/manager/MySqlDbTokenStateManagerTest.java
@@ -1,9 +1,12 @@
package io.quarkus.oidc.db.token.state.manager;
+import org.junit.jupiter.api.condition.EnabledIfSystemProperty;
import org.junit.jupiter.api.extension.RegisterExtension;
import io.quarkus.test.QuarkusUnitTest;
+// Becomes flaky in GitHub CI due to limited resources
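+// Runs only when the system property "run-mysql-db-token-state-manager-test" is set to "true"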
+@EnabledIfSystemProperty(named = "run-mysql-db-token-state-manager-test", disabledReason = "Insufficient GH CI resources", matches = "true")
public class MySqlDbTokenStateManagerTest extends AbstractDbTokenStateManagerTest {
@RegisterExtension
diff --git a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/OidcBuildStep.java b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/OidcBuildStep.java
index daa6a390ee240..a05726ec04991 100644
--- a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/OidcBuildStep.java
+++ b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/OidcBuildStep.java
@@ -1,11 +1,15 @@
package io.quarkus.oidc.deployment;
+import static io.quarkus.arc.processor.BuiltinScope.APPLICATION;
+import static io.quarkus.arc.processor.DotNames.DEFAULT;
+import static io.quarkus.oidc.runtime.OidcUtils.DEFAULT_TENANT_ID;
import static io.quarkus.vertx.http.deployment.EagerSecurityInterceptorCandidateBuildItem.hasProperEndpointModifiers;
import static org.jboss.jandex.AnnotationTarget.Kind.CLASS;
import static org.jboss.jandex.AnnotationTarget.Kind.METHOD;
import java.util.HashMap;
import java.util.Map;
+import java.util.Set;
import java.util.function.BooleanSupplier;
import java.util.function.Consumer;
import java.util.stream.Collectors;
@@ -21,8 +25,12 @@
import org.jboss.logging.Logger;
import io.quarkus.arc.deployment.AdditionalBeanBuildItem;
+import io.quarkus.arc.deployment.BeanDiscoveryFinishedBuildItem;
+import io.quarkus.arc.deployment.QualifierRegistrarBuildItem;
import io.quarkus.arc.deployment.SynthesisFinishedBuildItem;
import io.quarkus.arc.deployment.SyntheticBeanBuildItem;
+import io.quarkus.arc.processor.InjectionPointInfo;
+import io.quarkus.arc.processor.QualifierRegistrar;
import io.quarkus.deployment.Capabilities;
import io.quarkus.deployment.Capability;
import io.quarkus.deployment.Feature;
@@ -38,6 +46,8 @@
import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem;
import io.quarkus.oidc.SecurityEvent;
import io.quarkus.oidc.Tenant;
+import io.quarkus.oidc.TenantFeature;
+import io.quarkus.oidc.TenantIdentityProvider;
import io.quarkus.oidc.TokenIntrospectionCache;
import io.quarkus.oidc.UserInfoCache;
import io.quarkus.oidc.runtime.BackChannelLogoutHandler;
@@ -69,6 +79,8 @@
public class OidcBuildStep {
public static final DotName DOTNAME_SECURITY_EVENT = DotName.createSimple(SecurityEvent.class.getName());
private static final DotName TENANT_NAME = DotName.createSimple(Tenant.class);
+ private static final DotName TENANT_FEATURE_NAME = DotName.createSimple(TenantFeature.class);
+ private static final DotName TENANT_IDENTITY_PROVIDER_NAME = DotName.createSimple(TenantIdentityProvider.class);
private static final Logger LOG = Logger.getLogger(OidcBuildStep.class);
@BuildStep
@@ -129,6 +141,74 @@ ExtensionSslNativeSupportBuildItem enableSslInNative() {
return new ExtensionSslNativeSupportBuildItem(Feature.OIDC);
}
+ @BuildStep
+ QualifierRegistrarBuildItem addQualifiers() {
+ // this seems to be necessary; I think it's because sometimes we only access beans
+ // annotated with @TenantFeature programmatically and no injection point is annotated with it
+ return new QualifierRegistrarBuildItem(new QualifierRegistrar() {
+ @Override
+ public Map<DotName, Set<String>> getAdditionalQualifiers() {
+ return Map.of(TENANT_FEATURE_NAME, Set.of());
+ }
+ });
+ }
+
+ /**
+ * Produce {@link TenantIdentityProvider} with already selected tenant for each {@link TenantIdentityProvider}
+ * injection point annotated with {@link TenantFeature} annotation.
+ * For example, we produce {@link TenantIdentityProvider} with pre-selected tenant 'my-tenant' for injection point:
+ *
+ *
+ * @Inject
+ * @TenantFeature("my-tenant")
+ * OidcIdentityProvider identityProvider;
+ *
+ */
+ @Record(ExecutionTime.STATIC_INIT)
+ @BuildStep
+ void produceTenantIdentityProviders(BuildProducer<SyntheticBeanBuildItem> syntheticBeanProducer,
+ OidcRecorder recorder, BeanDiscoveryFinishedBuildItem beans, CombinedIndexBuildItem combinedIndex) {
+ // create TenantIdentityProviders for tenants selected with @TenantFeature like: @TenantFeature("my-tenant")
+ if (!combinedIndex.getIndex().getAnnotations(TENANT_FEATURE_NAME).isEmpty()) {
+ beans
+ .getInjectionPoints()
+ .stream()
+ .filter(ip -> ip.getRequiredQualifier(TENANT_FEATURE_NAME) != null)
+ .filter(OidcBuildStep::isTenantIdentityProviderType)
+ .map(ip -> ip.getRequiredQualifier(TENANT_FEATURE_NAME).value().asString())
+ .distinct()
+ .forEach(tenantName -> syntheticBeanProducer.produce(
+ SyntheticBeanBuildItem
+ .configure(TenantIdentityProvider.class)
+ .addQualifier().annotation(TENANT_FEATURE_NAME).addValue("value", tenantName).done()
+ .scope(APPLICATION.getInfo())
+ .supplier(recorder.createTenantIdentityProvider(tenantName))
+ .unremovable()
+ .done()));
+ }
+ // create TenantIdentityProvider for default tenant when tenant is not explicitly selected via @TenantFeature
+ boolean createTenantIdentityProviderForDefaultTenant = beans
+ .getInjectionPoints()
+ .stream()
+ .filter(InjectionPointInfo::hasDefaultedQualifier)
+ .anyMatch(OidcBuildStep::isTenantIdentityProviderType);
+ if (createTenantIdentityProviderForDefaultTenant) {
+ syntheticBeanProducer.produce(
+ SyntheticBeanBuildItem
+ .configure(TenantIdentityProvider.class)
+ .addQualifier(DEFAULT)
+ .scope(APPLICATION.getInfo())
+ .supplier(recorder.createTenantIdentityProvider(DEFAULT_TENANT_ID))
+ .unremovable()
+ .done());
+ }
+ }
+
+ private static boolean isTenantIdentityProviderType(InjectionPointInfo ip) {
+ return TENANT_IDENTITY_PROVIDER_NAME.equals(ip.getRequiredType().name());
+ }
+
@Record(ExecutionTime.RUNTIME_INIT)
@BuildStep
public SyntheticBeanBuildItem setup(
diff --git a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/DevServicesConfig.java b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/DevServicesConfig.java
index e5deeb150fdf0..624675b4fd05c 100644
--- a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/DevServicesConfig.java
+++ b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/DevServicesConfig.java
@@ -35,7 +35,7 @@ public class DevServicesConfig {
* string.
* Set 'quarkus.keycloak.devservices.keycloak-x-image' to override this check.
*/
- @ConfigItem(defaultValue = "quay.io/keycloak/keycloak:22.0.4")
+ @ConfigItem(defaultValue = "quay.io/keycloak/keycloak:22.0.5")
public String imageName;
/**
diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/TenantFeature.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/TenantFeature.java
index 01ed813cad152..34126a84f21ad 100644
--- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/TenantFeature.java
+++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/TenantFeature.java
@@ -1,19 +1,54 @@
package io.quarkus.oidc;
+import static java.lang.annotation.ElementType.FIELD;
+import static java.lang.annotation.ElementType.METHOD;
+import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.ElementType.TYPE;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
+import jakarta.enterprise.util.AnnotationLiteral;
+import jakarta.inject.Qualifier;
+
/**
- * Annotation which can be used to associate one or more OIDC features with a named tenant.
+ * Qualifier used to specify which named tenant is associated with one or more OIDC features.
*/
-@Target({ TYPE })
-@Retention(RetentionPolicy.RUNTIME)
+@Target({ METHOD, FIELD, PARAMETER, TYPE })
+@Retention(RUNTIME)
+@Documented
+@Qualifier
public @interface TenantFeature {
/**
* Identifies an OIDC tenant to which a given feature applies.
*/
String value();
+
+ /**
+ * Supports inline instantiation of the {@link TenantFeature} qualifier.
+ */
+ final class TenantFeatureLiteral extends AnnotationLiteral<TenantFeature> implements TenantFeature {
+
+ private final String value;
+
+ private TenantFeatureLiteral(String value) {
+ this.value = value;
+ }
+
+ @Override
+ public String value() {
+ return value;
+ }
+
+ @Override
+ public String toString() {
+ return "TenantFeatureLiteral [value=" + value + "]";
+ }
+
+ public static TenantFeature of(String value) {
+ return new TenantFeatureLiteral(value);
+ }
+ }
}
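Since @TenantFeature is now a CDI qualifier, tenant-specific OIDC beans can be bound to a tenant declaratively. A minimal sketch of a TokenCustomizer bound to a hypothetical 'hr' tenant; the class name and tenant id are illustrative, and @Unremovable is assumed to be needed because such beans are usually resolved programmatically rather than injected, as the OidcBuildStep comment above notes:

import jakarta.enterprise.context.ApplicationScoped;
import jakarta.json.Json;
import jakarta.json.JsonObject;

import io.quarkus.arc.Unremovable;
import io.quarkus.oidc.TenantFeature;
import io.quarkus.oidc.TokenCustomizer;

@ApplicationScoped
@Unremovable
@TenantFeature("hr") // apply this customizer only to the 'hr' OIDC tenant
public class HrTenantTokenCustomizer implements TokenCustomizer {

    @Override
    public JsonObject customizeHeaders(JsonObject headers) {
        // Illustrative tweak: force the expected signature algorithm into the token headers
        return Json.createObjectBuilder(headers).add("alg", "RS256").build();
    }
}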
diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/TenantIdentityProvider.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/TenantIdentityProvider.java
new file mode 100644
index 0000000000000..fd37e50e8c4a8
--- /dev/null
+++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/TenantIdentityProvider.java
@@ -0,0 +1,15 @@
+package io.quarkus.oidc;
+
+import io.quarkus.security.identity.SecurityIdentity;
+import io.smallrye.mutiny.Uni;
+
+/**
+ * Tenant-specific {@link SecurityIdentity} provider. The associated tenant configuration is selected
+ * with the {@link TenantFeature} qualifier. When an injection point is not annotated with the {@link TenantFeature}
+ * qualifier, the default tenant is selected.
+ */
+public interface TenantIdentityProvider {
+
+ Uni<SecurityIdentity> authenticate(AccessTokenCredential token);
+
+}
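A minimal usage sketch of the new TenantIdentityProvider API, assuming an application-scoped caller, a tenant named 'my-tenant', and the single-argument AccessTokenCredential constructor; without the @TenantFeature qualifier, the default tenant provider produced by OidcBuildStep would be injected instead:

import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;

import io.quarkus.oidc.AccessTokenCredential;
import io.quarkus.oidc.TenantFeature;
import io.quarkus.oidc.TenantIdentityProvider;
import io.quarkus.security.identity.SecurityIdentity;
import io.smallrye.mutiny.Uni;

@ApplicationScoped
public class MyTenantTokenVerifier {

    @Inject
    @TenantFeature("my-tenant") // authenticate against the 'my-tenant' OIDC tenant
    TenantIdentityProvider identityProvider;

    public Uni<SecurityIdentity> verify(String rawAccessToken) {
        // Wrap the bearer token and let the tenant-specific provider validate it
        return identityProvider.authenticate(new AccessTokenCredential(rawAccessToken));
    }
}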
diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/DefaultTenantConfigResolver.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/DefaultTenantConfigResolver.java
index 784eae19f8c2d..1448c112aa928 100644
--- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/DefaultTenantConfigResolver.java
+++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/DefaultTenantConfigResolver.java
@@ -3,6 +3,7 @@
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
+import java.util.function.Supplier;
import jakarta.annotation.PostConstruct;
import jakarta.enterprise.context.ApplicationScoped;
@@ -113,33 +114,37 @@ public OidcTenantConfig apply(OidcTenantConfig tenantConfig) {
});
}
+ Uni<TenantConfigContext> resolveContext(String tenantId) {
+ return initializeTenantIfContextNotReady(getStaticTenantContext(tenantId));
+ }
+
Uni<TenantConfigContext> resolveContext(RoutingContext context) {
- return getDynamicTenantContext(context).chain(new Function<TenantConfigContext, Uni<? extends TenantConfigContext>>() {
+ return getDynamicTenantContext(context).onItem().ifNull().switchTo(new Supplier<Uni<? extends TenantConfigContext>>() {
@Override
- public Uni<? extends TenantConfigContext> apply(TenantConfigContext tenantConfigContext) {
- if (tenantConfigContext != null) {
- return Uni.createFrom().item(tenantConfigContext);
- }
- TenantConfigContext tenantContext = getStaticTenantContext(context);
- if (tenantContext != null && !tenantContext.ready) {
-
- // check if the connection has already been created
- TenantConfigContext readyTenantContext = tenantConfigBean.getDynamicTenantsConfig()
- .get(tenantContext.oidcConfig.tenantId.get());
- if (readyTenantContext == null) {
- LOG.debugf("Tenant '%s' is not initialized yet, trying to create OIDC connection now",
- tenantContext.oidcConfig.tenantId.get());
- return tenantConfigBean.getTenantConfigContextFactory().apply(tenantContext.oidcConfig);
- } else {
- tenantContext = readyTenantContext;
- }
- }
-
- return Uni.createFrom().item(tenantContext);
+ public Uni<? extends TenantConfigContext> get() {
+ return initializeTenantIfContextNotReady(getStaticTenantContext(context));
}
});
}
+ private Uni<TenantConfigContext> initializeTenantIfContextNotReady(TenantConfigContext tenantContext) {
+ if (tenantContext != null && !tenantContext.ready) {
+
+ // check if the connection has already been created
+ TenantConfigContext readyTenantContext = tenantConfigBean.getDynamicTenantsConfig()
+ .get(tenantContext.oidcConfig.tenantId.get());
+ if (readyTenantContext == null) {
+ LOG.debugf("Tenant '%s' is not initialized yet, trying to create OIDC connection now",
+ tenantContext.oidcConfig.tenantId.get());
+ return tenantConfigBean.getTenantConfigContextFactory().apply(tenantContext.oidcConfig);
+ } else {
+ tenantContext = readyTenantContext;
+ }
+ }
+
+ return Uni.createFrom().item(tenantContext);
+ }
+
private TenantConfigContext getStaticTenantContext(RoutingContext context) {
String tenantId = context.get(CURRENT_STATIC_TENANT_ID);
@@ -161,6 +166,10 @@ private TenantConfigContext getStaticTenantContext(RoutingContext context) {
context.put(CURRENT_STATIC_TENANT_ID_NULL, true);
}
+ return getStaticTenantContext(tenantId);
+ }
+
+ private TenantConfigContext getStaticTenantContext(String tenantId) {
TenantConfigContext configContext = tenantId != null ? tenantConfigBean.getStaticTenantsConfig().get(tenantId) : null;
if (configContext == null) {
if (tenantId != null && !tenantId.isEmpty()) {
diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcIdentityProvider.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcIdentityProvider.java
index 164b9c6fe8d29..26d053339d97a 100644
--- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcIdentityProvider.java
+++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcIdentityProvider.java
@@ -1,8 +1,10 @@
package io.quarkus.oidc.runtime;
import static io.quarkus.oidc.runtime.OidcUtils.validateAndCreateIdentity;
+import static io.quarkus.vertx.http.runtime.security.HttpSecurityUtils.getRoutingContextAttribute;
import java.security.Principal;
+import java.util.Map;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Function;
@@ -32,10 +34,8 @@
import io.quarkus.security.identity.request.TokenAuthenticationRequest;
import io.quarkus.security.runtime.QuarkusSecurityIdentity;
import io.quarkus.security.spi.runtime.BlockingSecurityExecutor;
-import io.quarkus.vertx.http.runtime.security.HttpSecurityUtils;
import io.smallrye.mutiny.Uni;
import io.vertx.core.json.JsonObject;
-import io.vertx.ext.web.RoutingContext;
@ApplicationScoped
public class OidcIdentityProvider implements IdentityProvider<TokenAuthenticationRequest> {
@@ -48,12 +48,12 @@ public class OidcIdentityProvider implements IdentityProvider<TokenAuthenticationRequest> {
private static final Uni<TokenVerificationResult> NULL_CODE_ACCESS_TOKEN_UNI = Uni.createFrom().nullItem();
private static final String CODE_ACCESS_TOKEN_RESULT = "code_flow_access_token_result";
- private final DefaultTenantConfigResolver tenantResolver;
+ protected final DefaultTenantConfigResolver tenantResolver;
private final BlockingTaskRunner<Void> uniVoidOidcContext;
private final BlockingTaskRunner<TokenIntrospection> getIntrospectionRequestContext;
private final BlockingTaskRunner<UserInfo> getUserInfoRequestContext;
- public OidcIdentityProvider(DefaultTenantConfigResolver tenantResolver, BlockingSecurityExecutor blockingExecutor) {
+ OidcIdentityProvider(DefaultTenantConfigResolver tenantResolver, BlockingSecurityExecutor blockingExecutor) {
this.tenantResolver = tenantResolver;
this.uniVoidOidcContext = new BlockingTaskRunner<>(blockingExecutor);
this.getIntrospectionRequestContext = new BlockingTaskRunner<>(blockingExecutor);
@@ -72,65 +72,69 @@ public Uni<SecurityIdentity> authenticate(TokenAuthenticationRequest request,
return Uni.createFrom().nullItem();
}
LOG.debug("Starting creating SecurityIdentity");
- RoutingContext vertxContext = HttpSecurityUtils.getRoutingContextAttribute(request);
- vertxContext.put(AuthenticationRequestContext.class.getName(), context);
- Uni<TenantConfigContext> tenantConfigContext = tenantResolver.resolveContext(vertxContext);
-
- return tenantConfigContext.onItem()
+ return resolveTenantConfigContext(request, context).onItem()
.transformToUni(new Function<TenantConfigContext, Uni<? extends SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> apply(TenantConfigContext tenantConfigContext) {
return Uni.createFrom().deferred(new Supplier<Uni<? extends SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> get() {
- return authenticate(request, vertxContext, tenantConfigContext);
+ return authenticate(request, getRequestData(request), tenantConfigContext);
}
});
}
});
}
- private Uni<SecurityIdentity> authenticate(TokenAuthenticationRequest request,
- RoutingContext vertxContext,
+ protected Uni<TenantConfigContext> resolveTenantConfigContext(TokenAuthenticationRequest request,
+ AuthenticationRequestContext context) {
+ return tenantResolver.resolveContext(
+ getRoutingContextAttribute(request).put(AuthenticationRequestContext.class.getName(), context));
+ }
+
+ protected Map<String, Object> getRequestData(TokenAuthenticationRequest request) {
+ return getRoutingContextAttribute(request).data();
+ }
+
+ private Uni<SecurityIdentity> authenticate(TokenAuthenticationRequest request, Map<String, Object> requestData,
TenantConfigContext resolvedContext) {
if (resolvedContext.oidcConfig.publicKey.isPresent()) {
LOG.debug("Performing token verification with a configured public key");
return validateTokenWithoutOidcServer(request, resolvedContext);
} else {
- return validateAllTokensWithOidcServer(vertxContext, request, resolvedContext);
+ return validateAllTokensWithOidcServer(requestData, request, resolvedContext);
}
}
- private Uni<SecurityIdentity> validateAllTokensWithOidcServer(RoutingContext vertxContext,
- TokenAuthenticationRequest request,
- TenantConfigContext resolvedContext) {
+ private Uni<SecurityIdentity> validateAllTokensWithOidcServer(Map<String, Object> requestData,
+ TokenAuthenticationRequest request, TenantConfigContext resolvedContext) {
if (resolvedContext.oidcConfig.token.verifyAccessTokenWithUserInfo.orElse(false)
- && isOpaqueAccessToken(vertxContext, request, resolvedContext)) {
+ && isOpaqueAccessToken(requestData, request, resolvedContext)) {
// UserInfo has to be acquired first as a precondition for verifying opaque access tokens.
// Typically this is done for bearer access tokens, so even if the access token has expired
// the client will be able to refresh it if needed; no refresh token is available to Quarkus during
// bearer access token verification.
if (resolvedContext.oidcConfig.authentication.isUserInfoRequired().orElse(false)) {
- return getUserInfoUni(vertxContext, request, resolvedContext).onItemOrFailure().transformToUni(
+ return getUserInfoUni(requestData, request, resolvedContext).onItemOrFailure().transformToUni(
new BiFunction<UserInfo, Throwable, Uni<? extends SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> apply(UserInfo userInfo, Throwable t) {
if (t != null) {
return Uni.createFrom().failure(new AuthenticationFailedException(t));
}
- return validateTokenWithUserInfoAndCreateIdentity(vertxContext, request, resolvedContext,
+ return validateTokenWithUserInfoAndCreateIdentity(requestData, request, resolvedContext,
userInfo);
}
});
} else {
- return validateTokenWithUserInfoAndCreateIdentity(vertxContext, request, resolvedContext, null);
+ return validateTokenWithUserInfoAndCreateIdentity(requestData, request, resolvedContext, null);
}
} else {
final Uni<TokenVerificationResult> primaryTokenUni;
if (isInternalIdToken(request)) {
- if (vertxContext.get(NEW_AUTHENTICATION) == Boolean.TRUE) {
+ if (requestData.get(NEW_AUTHENTICATION) == Boolean.TRUE) {
// No need to verify it in this case as 'CodeAuthenticationMechanism' has just created it
primaryTokenUni = Uni.createFrom()
.item(new TokenVerificationResult(OidcUtils.decodeJwtContent(request.getToken().getToken()), null));
@@ -138,13 +142,13 @@ public Uni<SecurityIdentity> apply(UserInfo userInfo, Throwable t) {
primaryTokenUni = verifySelfSignedTokenUni(resolvedContext, request.getToken().getToken());
}
} else {
- primaryTokenUni = verifyTokenUni(vertxContext, resolvedContext, request.getToken().getToken(),
+ primaryTokenUni = verifyTokenUni(requestData, resolvedContext, request.getToken().getToken(),
isIdToken(request), null);
}
// Verify Code Flow access token first if it is available and has to be verified.
// It may be refreshed if it has or has nearly expired
- Uni<TokenVerificationResult> codeAccessTokenUni = verifyCodeFlowAccessTokenUni(vertxContext, request,
+ Uni<TokenVerificationResult> codeAccessTokenUni = verifyCodeFlowAccessTokenUni(requestData, request,
resolvedContext,
null);
return codeAccessTokenUni.onItemOrFailure().transformToUni(
@@ -156,23 +160,23 @@ public Uni<SecurityIdentity> apply(TokenVerificationResult codeAccessTokenResult
: new AuthenticationFailedException(t));
}
if (codeAccessTokenResult != null) {
- if (tokenAutoRefreshPrepared(codeAccessTokenResult, vertxContext,
+ if (tokenAutoRefreshPrepared(codeAccessTokenResult, requestData,
resolvedContext.oidcConfig)) {
return Uni.createFrom().failure(new TokenAutoRefreshException(null));
}
- vertxContext.put(CODE_ACCESS_TOKEN_RESULT, codeAccessTokenResult);
+ requestData.put(CODE_ACCESS_TOKEN_RESULT, codeAccessTokenResult);
}
- return getUserInfoAndCreateIdentity(primaryTokenUni, vertxContext, request, resolvedContext);
+ return getUserInfoAndCreateIdentity(primaryTokenUni, requestData, request, resolvedContext);
}
});
}
}
- private Uni<SecurityIdentity> validateTokenWithUserInfoAndCreateIdentity(RoutingContext vertxContext,
+ private Uni<SecurityIdentity> validateTokenWithUserInfoAndCreateIdentity(Map<String, Object> requestData,
TokenAuthenticationRequest request,
TenantConfigContext resolvedContext, UserInfo userInfo) {
- Uni<TokenVerificationResult> codeAccessTokenUni = verifyCodeFlowAccessTokenUni(vertxContext, request, resolvedContext,
+ Uni<TokenVerificationResult> codeAccessTokenUni = verifyCodeFlowAccessTokenUni(requestData, request, resolvedContext,
userInfo);
return codeAccessTokenUni.onItemOrFailure().transformToUni(
@@ -184,10 +188,10 @@ public Uni<SecurityIdentity> apply(TokenVerificationResult codeAccessToken, Thro
}
if (codeAccessToken != null) {
- vertxContext.put(CODE_ACCESS_TOKEN_RESULT, codeAccessToken);
+ requestData.put(CODE_ACCESS_TOKEN_RESULT, codeAccessToken);
}
- Uni<TokenVerificationResult> tokenUni = verifyTokenUni(vertxContext, resolvedContext,
+ Uni<TokenVerificationResult> tokenUni = verifyTokenUni(requestData, resolvedContext,
request.getToken().getToken(),
false, userInfo);
@@ -200,7 +204,7 @@ public Uni<SecurityIdentity> apply(TokenVerificationResult result, Throwable t)
return Uni.createFrom().failure(new AuthenticationFailedException(t));
}
- return createSecurityIdentityWithOidcServer(result, vertxContext, request,
+ return createSecurityIdentityWithOidcServer(result, requestData, request,
resolvedContext, userInfo);
}
});
@@ -210,7 +214,7 @@ public Uni<SecurityIdentity> apply(TokenVerificationResult result, Throwable t)
}
private Uni<SecurityIdentity> getUserInfoAndCreateIdentity(Uni<TokenVerificationResult> tokenUni,
- RoutingContext vertxContext, TokenAuthenticationRequest request,
+ Map<String, Object> requestData, TokenAuthenticationRequest request,
TenantConfigContext resolvedContext) {
return tokenUni.onItemOrFailure()
@@ -221,19 +225,19 @@ public Uni<SecurityIdentity> apply(TokenVerificationResult result, Throwable t)
return Uni.createFrom().failure(new AuthenticationFailedException(t));
}
if (resolvedContext.oidcConfig.authentication.isUserInfoRequired().orElse(false)) {
- return getUserInfoUni(vertxContext, request, resolvedContext).onItemOrFailure().transformToUni(
+ return getUserInfoUni(requestData, request, resolvedContext).onItemOrFailure().transformToUni(
new BiFunction<UserInfo, Throwable, Uni<? extends SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> apply(UserInfo userInfo, Throwable t) {
if (t != null) {
return Uni.createFrom().failure(new AuthenticationFailedException(t));
}
- return createSecurityIdentityWithOidcServer(result, vertxContext, request,
+ return createSecurityIdentityWithOidcServer(result, requestData, request,
resolvedContext, userInfo);
}
});
} else {
- return createSecurityIdentityWithOidcServer(result, vertxContext, request, resolvedContext, null);
+ return createSecurityIdentityWithOidcServer(result, requestData, request, resolvedContext, null);
}
}
@@ -241,21 +245,21 @@ public Uni<SecurityIdentity> apply(UserInfo userInfo, Throwable t) {
}
- private boolean isOpaqueAccessToken(RoutingContext vertxContext, TokenAuthenticationRequest request,
+ private boolean isOpaqueAccessToken(Map<String, Object> requestData, TokenAuthenticationRequest request,
TenantConfigContext resolvedContext) {
if (request.getToken() instanceof AccessTokenCredential) {
return ((AccessTokenCredential) request.getToken()).isOpaque();
} else if (request.getToken() instanceof IdTokenCredential
&& (resolvedContext.oidcConfig.authentication.verifyAccessToken
|| resolvedContext.oidcConfig.roles.source.orElse(null) == Source.accesstoken)) {
- final String codeAccessToken = (String) vertxContext.get(OidcConstants.ACCESS_TOKEN_VALUE);
+ final String codeAccessToken = (String) requestData.get(OidcConstants.ACCESS_TOKEN_VALUE);
return OidcUtils.isOpaqueToken(codeAccessToken);
}
return false;
}
private Uni<SecurityIdentity> createSecurityIdentityWithOidcServer(TokenVerificationResult result,
- RoutingContext vertxContext, TokenAuthenticationRequest request, TenantConfigContext resolvedContext,
+ Map<String, Object> requestData, TokenAuthenticationRequest request, TenantConfigContext resolvedContext,
final UserInfo userInfo) {
// Token has been verified, as a JWT or an opaque token, possibly involving
@@ -279,14 +283,14 @@ private Uni<SecurityIdentity> createSecurityIdentityWithOidcServer(TokenVerifica
return Uni.createFrom().failure(new AuthenticationCompletionException(errorMessage));
}
- JsonObject rolesJson = getRolesJson(vertxContext, resolvedContext, tokenCred, tokenJson,
+ JsonObject rolesJson = getRolesJson(requestData, resolvedContext, tokenCred, tokenJson,
userInfo);
- SecurityIdentity securityIdentity = validateAndCreateIdentity(vertxContext, tokenCred,
- resolvedContext, tokenJson, rolesJson, userInfo, result.introspectionResult);
+ SecurityIdentity securityIdentity = validateAndCreateIdentity(requestData, tokenCred,
+ resolvedContext, tokenJson, rolesJson, userInfo, result.introspectionResult, request);
// If the primary token is a bearer access token then there's no point in checking whether
// it should be refreshed, as a refresh token is only available for the code flow tokens
if (isIdToken(request)
- && tokenAutoRefreshPrepared(result, vertxContext, resolvedContext.oidcConfig)) {
+ && tokenAutoRefreshPrepared(result, requestData, resolvedContext.oidcConfig)) {
return Uni.createFrom().failure(new TokenAutoRefreshException(securityIdentity));
} else {
return Uni.createFrom().item(securityIdentity);
@@ -344,14 +348,15 @@ public String getName() {
OidcUtils.setSecurityIdentityRoles(builder, resolvedContext.oidcConfig, rolesJson);
OidcUtils.setSecurityIdentityPermissions(builder, resolvedContext.oidcConfig, rolesJson);
}
- OidcUtils.setBlockingApiAttribute(builder, vertxContext);
OidcUtils.setTenantIdAttribute(builder, resolvedContext.oidcConfig);
+ var vertxContext = getRoutingContextAttribute(request);
+ OidcUtils.setBlockingApiAttribute(builder, vertxContext);
OidcUtils.setRoutingContextAttribute(builder, vertxContext);
SecurityIdentity identity = builder.build();
// If the primary token is a bearer access token then there's no point in checking whether
// it should be refreshed, as a refresh token is only available for the code flow tokens
if (isIdToken(request)
- && tokenAutoRefreshPrepared(result, vertxContext, resolvedContext.oidcConfig)) {
+ && tokenAutoRefreshPrepared(result, requestData, resolvedContext.oidcConfig)) {
return Uni.createFrom().failure(new TokenAutoRefreshException(identity));
}
return Uni.createFrom().item(identity);
@@ -367,12 +372,12 @@ private static boolean isIdToken(TokenAuthenticationRequest request) {
return request.getToken() instanceof IdTokenCredential;
}
- private static boolean tokenAutoRefreshPrepared(TokenVerificationResult result, RoutingContext vertxContext,
+ private static boolean tokenAutoRefreshPrepared(TokenVerificationResult result, Map<String, Object> requestData,
OidcTenantConfig oidcConfig) {
if (result != null && oidcConfig.token.refreshExpired
&& oidcConfig.token.getRefreshTokenTimeSkew().isPresent()
- && vertxContext.get(REFRESH_TOKEN_GRANT_RESPONSE) != Boolean.TRUE
- && vertxContext.get(NEW_AUTHENTICATION) != Boolean.TRUE) {
+ && requestData.get(REFRESH_TOKEN_GRANT_RESPONSE) != Boolean.TRUE
+ && requestData.get(NEW_AUTHENTICATION) != Boolean.TRUE) {
Long expiry = null;
if (result.localVerificationResult != null) {
expiry = result.localVerificationResult.getLong(Claims.exp.name());
@@ -388,7 +393,7 @@ private static boolean tokenAutoRefreshPrepared(TokenVerificationResult result,
return false;
}
- private static JsonObject getRolesJson(RoutingContext vertxContext, TenantConfigContext resolvedContext,
+ private static JsonObject getRolesJson(Map<String, Object> requestData, TenantConfigContext resolvedContext,
TokenCredential tokenCred,
JsonObject tokenJson, UserInfo userInfo) {
JsonObject rolesJson = tokenJson;
@@ -397,32 +402,32 @@ private static JsonObject getRolesJson(RoutingContext vertxContext, TenantConfig
rolesJson = new JsonObject(userInfo.getJsonObject().toString());
} else if (tokenCred instanceof IdTokenCredential
&& resolvedContext.oidcConfig.roles.source.get() == Source.accesstoken) {
- rolesJson = ((TokenVerificationResult) vertxContext.get(CODE_ACCESS_TOKEN_RESULT)).localVerificationResult;
+ rolesJson = ((TokenVerificationResult) requestData.get(CODE_ACCESS_TOKEN_RESULT)).localVerificationResult;
if (rolesJson == null) {
// The JSON token representation may be null not only if it is an opaque access token,
// but also if it is a JWT, no JWK with a matching kid is available, the asynchronous
// JWK refresh has not finished yet, and the fallback introspection request has succeeded.
- rolesJson = OidcUtils.decodeJwtContent((String) vertxContext.get(OidcConstants.ACCESS_TOKEN_VALUE));
+ rolesJson = OidcUtils.decodeJwtContent((String) requestData.get(OidcConstants.ACCESS_TOKEN_VALUE));
}
}
}
return rolesJson;
}
- private Uni<TokenVerificationResult> verifyCodeFlowAccessTokenUni(RoutingContext vertxContext,
+ private Uni<TokenVerificationResult> verifyCodeFlowAccessTokenUni(Map<String, Object> requestData,
TokenAuthenticationRequest request,
TenantConfigContext resolvedContext, UserInfo userInfo) {
if (request.getToken() instanceof IdTokenCredential
&& (resolvedContext.oidcConfig.authentication.verifyAccessToken
|| resolvedContext.oidcConfig.roles.source.orElse(null) == Source.accesstoken)) {
- final String codeAccessToken = (String) vertxContext.get(OidcConstants.ACCESS_TOKEN_VALUE);
- return verifyTokenUni(vertxContext, resolvedContext, codeAccessToken, false, userInfo);
+ final String codeAccessToken = (String) requestData.get(OidcConstants.ACCESS_TOKEN_VALUE);
+ return verifyTokenUni(requestData, resolvedContext, codeAccessToken, false, userInfo);
} else {
return NULL_CODE_ACCESS_TOKEN_UNI;
}
}
- private Uni<TokenVerificationResult> verifyTokenUni(RoutingContext vertxContext, TenantConfigContext resolvedContext,
+ private Uni<TokenVerificationResult> verifyTokenUni(Map<String, Object> requestData, TenantConfigContext resolvedContext,
String token, boolean enforceAudienceVerification, UserInfo userInfo) {
if (OidcUtils.isOpaqueToken(token)) {
if (!resolvedContext.oidcConfig.token.allowOpaqueTokenIntrospection) {
@@ -449,7 +454,7 @@ private Uni<TokenVerificationResult> verifyTokenUni(RoutingContext vertxContext,
return introspectTokenUni(resolvedContext, token, false);
} else {
// Verify JWT token with the local JWK keys with a possible remote introspection fallback
- final String nonce = vertxContext.get(OidcConstants.NONCE);
+ final String nonce = (String) requestData.get(OidcConstants.NONCE);
try {
LOG.debug("Verifying the JWT token with the local JWK keys");
return Uni.createFrom()
@@ -541,14 +546,14 @@ private static Uni<SecurityIdentity> validateTokenWithoutOidcServer(TokenAuthent
TokenVerificationResult result = resolvedContext.provider.verifyJwtToken(request.getToken().getToken(), false,
false, null);
return Uni.createFrom()
- .item(validateAndCreateIdentity(null, request.getToken(), resolvedContext,
- result.localVerificationResult, result.localVerificationResult, null, null));
+ .item(validateAndCreateIdentity(Map.of(), request.getToken(), resolvedContext,
+ result.localVerificationResult, result.localVerificationResult, null, null, request));
} catch (Throwable t) {
return Uni.createFrom().failure(new AuthenticationFailedException(t));
}
}
- private Uni<UserInfo> getUserInfoUni(RoutingContext vertxContext, TokenAuthenticationRequest request,
+ private Uni<UserInfo> getUserInfoUni(Map<String, Object> requestData, TokenAuthenticationRequest request,
TenantConfigContext resolvedContext) {
if (isInternalIdToken(request) && resolvedContext.oidcConfig.cacheUserInfoInIdtoken) {
JsonObject userInfo = OidcUtils.decodeJwtContent(request.getToken().getToken())
@@ -559,7 +564,7 @@ private Uni<UserInfo> getUserInfoUni(RoutingContext vertxContext, TokenAuthentic
}
LOG.debug("Requesting UserInfo");
- String contextAccessToken = vertxContext.get(OidcConstants.ACCESS_TOKEN_VALUE);
+ String contextAccessToken = (String) requestData.get(OidcConstants.ACCESS_TOKEN_VALUE);
final String accessToken = contextAccessToken != null ? contextAccessToken : request.getToken().getToken();
UserInfoCache userInfoCache = tenantResolver.getUserInfoCache();
diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcRecorder.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcRecorder.java
index f073070c4c0df..f53e0d0a0e23b 100644
--- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcRecorder.java
+++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcRecorder.java
@@ -1,5 +1,8 @@
package io.quarkus.oidc.runtime;
+import static io.quarkus.oidc.runtime.OidcUtils.DEFAULT_TENANT_ID;
+import static io.quarkus.vertx.http.runtime.security.HttpSecurityUtils.getRoutingContextAttribute;
+
import java.security.Key;
import java.util.HashMap;
import java.util.List;
@@ -18,6 +21,7 @@
import io.quarkus.arc.Arc;
import io.quarkus.arc.ArcContainer;
+import io.quarkus.oidc.AccessTokenCredential;
import io.quarkus.oidc.OIDCException;
import io.quarkus.oidc.OidcConfigurationMetadata;
import io.quarkus.oidc.OidcTenantConfig;
@@ -25,6 +29,7 @@
import io.quarkus.oidc.OidcTenantConfig.Roles.Source;
import io.quarkus.oidc.OidcTenantConfig.TokenStateManager.Strategy;
import io.quarkus.oidc.TenantConfigResolver;
+import io.quarkus.oidc.TenantIdentityProvider;
import io.quarkus.oidc.common.OidcClientRequestFilter;
import io.quarkus.oidc.common.runtime.OidcCommonConfig;
import io.quarkus.oidc.common.runtime.OidcCommonUtils;
@@ -33,6 +38,10 @@
import io.quarkus.runtime.TlsConfig;
import io.quarkus.runtime.annotations.Recorder;
import io.quarkus.runtime.configuration.ConfigurationException;
+import io.quarkus.security.identity.AuthenticationRequestContext;
+import io.quarkus.security.identity.SecurityIdentity;
+import io.quarkus.security.identity.request.TokenAuthenticationRequest;
+import io.quarkus.security.spi.runtime.BlockingSecurityExecutor;
import io.quarkus.security.spi.runtime.MethodDescription;
import io.smallrye.jwt.algorithm.KeyEncryptionAlgorithm;
import io.smallrye.jwt.util.KeyUtils;
@@ -63,7 +72,7 @@ public DefaultTokenIntrospectionUserInfoCache get() {
public Supplier<TenantConfigBean> setup(OidcConfig config, Supplier<Vertx> vertx, TlsConfig tlsConfig) {
final Vertx vertxValue = vertx.get();
- String defaultTenantId = config.defaultTenant.getTenantId().orElse(OidcUtils.DEFAULT_TENANT_ID);
+ String defaultTenantId = config.defaultTenant.getTenantId().orElse(DEFAULT_TENANT_ID);
TenantConfigContext defaultTenantContext = createStaticTenantContext(vertxValue, config.defaultTenant,
!config.namedTenants.isEmpty(), tlsConfig, defaultTenantId);
@@ -177,7 +186,7 @@ private Uni<TenantConfigContext> createTenantContext(Vertx vertx, OidcTenantConf
try {
if (!oidcConfig.getAuthServerUrl().isPresent()) {
- if (OidcUtils.DEFAULT_TENANT_ID.equals(oidcConfig.tenantId.get())) {
+ if (DEFAULT_TENANT_ID.equals(oidcConfig.tenantId.get())) {
ArcContainer container = Arc.container();
if (container != null
&& (container.instance(TenantConfigResolver.class).isAvailable() || checkNamedTenants)) {
@@ -498,4 +507,66 @@ public void accept(RoutingContext routingContext) {
}
};
}
+
+ public Supplier<TenantIdentityProvider> createTenantIdentityProvider(String tenantName) {
+ return new Supplier<TenantIdentityProvider>() {
+ @Override
+ public TenantIdentityProvider get() {
+ return new TenantSpecificOidcIdentityProvider(tenantName);
+ }
+ };
+ }
+
+ private static final class TenantSpecificOidcIdentityProvider extends OidcIdentityProvider
+ implements TenantIdentityProvider {
+
+ private final String tenantId;
+ private final BlockingSecurityExecutor blockingExecutor;
+
+ private TenantSpecificOidcIdentityProvider(String tenantId) {
+ super(Arc.container().instance(DefaultTenantConfigResolver.class).get(),
+ Arc.container().instance(BlockingSecurityExecutor.class).get());
+ this.blockingExecutor = Arc.container().instance(BlockingSecurityExecutor.class).get();
+ if (tenantId.equals(DEFAULT_TENANT_ID)) {
+ OidcConfig config = Arc.container().instance(OidcConfig.class).get();
+ this.tenantId = config.defaultTenant.getTenantId().orElse(OidcUtils.DEFAULT_TENANT_ID);
+ } else {
+ this.tenantId = tenantId;
+ }
+ }
+
+ @Override
+ public Uni<SecurityIdentity> authenticate(AccessTokenCredential token) {
+ return authenticate(new TokenAuthenticationRequest(token));
+ }
+
+ @Override
+ protected Uni<TenantConfigContext> resolveTenantConfigContext(TokenAuthenticationRequest request,
+ AuthenticationRequestContext context) {
+ return tenantResolver.resolveContext(tenantId).onItem().ifNull().failWith(new Supplier<Throwable>() {
+ @Override
+ public Throwable get() {
+ return new OIDCException("Failed to resolve tenant context");
+ }
+ });
+ }
+
+ @Override
+ protected Map<String, Object> getRequestData(TokenAuthenticationRequest request) {
+ RoutingContext context = getRoutingContextAttribute(request);
+ if (context != null) {
+ return context.data();
+ }
+ return new HashMap<>();
+ }
+
+ private Uni<SecurityIdentity> authenticate(TokenAuthenticationRequest request) {
+ return authenticate(request, new AuthenticationRequestContext() {
+ @Override
+ public Uni<SecurityIdentity> runBlocking(Supplier<SecurityIdentity> function) {
+ return blockingExecutor.executeBlocking(function);
+ }
+ });
+ }
+ }
}
diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcUtils.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcUtils.java
index d276832377c44..7b3d41938b0b5 100644
--- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcUtils.java
+++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcUtils.java
@@ -1,6 +1,7 @@
package io.quarkus.oidc.runtime;
import static io.quarkus.oidc.common.runtime.OidcConstants.TOKEN_SCOPE;
+import static io.quarkus.vertx.http.runtime.security.HttpSecurityUtils.getRoutingContextAttribute;
import java.nio.charset.StandardCharsets;
import java.security.Key;
@@ -14,6 +15,7 @@
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
+import java.util.Map;
import java.util.StringTokenizer;
import java.util.function.Consumer;
import java.util.function.Function;
@@ -45,6 +47,7 @@
import io.quarkus.security.StringPermission;
import io.quarkus.security.credential.TokenCredential;
import io.quarkus.security.identity.AuthenticationRequestContext;
+import io.quarkus.security.identity.request.TokenAuthenticationRequest;
import io.quarkus.security.runtime.QuarkusSecurityIdentity;
import io.quarkus.security.runtime.QuarkusSecurityIdentity.Builder;
import io.smallrye.jwt.algorithm.ContentEncryptionAlgorithm;
@@ -239,16 +242,14 @@ private static List<String> convertJsonArrayToList(JsonArray claimValue) {
return list;
}
- static QuarkusSecurityIdentity validateAndCreateIdentity(
- RoutingContext vertxContext, TokenCredential credential,
+ static QuarkusSecurityIdentity validateAndCreateIdentity(Map<String, Object> requestData, TokenCredential credential,
TenantConfigContext resolvedContext, JsonObject tokenJson, JsonObject rolesJson, UserInfo userInfo,
- TokenIntrospection introspectionResult) {
+ TokenIntrospection introspectionResult, TokenAuthenticationRequest request) {
OidcTenantConfig config = resolvedContext.oidcConfig;
QuarkusSecurityIdentity.Builder builder = QuarkusSecurityIdentity.builder();
builder.addCredential(credential);
- AuthorizationCodeTokens codeTokens = vertxContext != null ? vertxContext.get(AuthorizationCodeTokens.class.getName())
- : null;
+ AuthorizationCodeTokens codeTokens = (AuthorizationCodeTokens) requestData.get(AuthorizationCodeTokens.class.getName());
if (codeTokens != null) {
RefreshToken refreshTokenCredential = new RefreshToken(codeTokens.getRefreshToken());
builder.addCredential(refreshTokenCredential);
@@ -265,6 +266,7 @@ static QuarkusSecurityIdentity validateAndCreateIdentity(
}
builder.addAttribute(QUARKUS_IDENTITY_EXPIRE_TIME, jwtPrincipal.getExpirationTime());
builder.setPrincipal(jwtPrincipal);
+ var vertxContext = getRoutingContextAttribute(request);
setRoutingContextAttribute(builder, vertxContext);
setSecurityIdentityRoles(builder, config, rolesJson);
setSecurityIdentityPermissions(builder, config, rolesJson);
diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/TokenCustomizerFinder.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/TokenCustomizerFinder.java
index b8c12d52fdd63..d09633054b5fa 100644
--- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/TokenCustomizerFinder.java
+++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/TokenCustomizerFinder.java
@@ -28,13 +28,10 @@ public static TokenCustomizer find(OidcTenantConfig oidcConfig) {
} else {
throw new OIDCException("Unable to find TokenCustomizer " + customizerName);
}
- } else {
- for (InstanceHandle<TokenCustomizer> tokenCustomizer : container.listAll(TokenCustomizer.class)) {
- TenantFeature tenantAnn = tokenCustomizer.get().getClass().getAnnotation(TenantFeature.class);
- if (tenantAnn != null && oidcConfig.tenantId.get().equals(tenantAnn.value())) {
- return tokenCustomizer.get();
- }
- }
+ } else if (oidcConfig.tenantId.isPresent()) {
+ return container
+ .instance(TokenCustomizer.class, TenantFeature.TenantFeatureLiteral.of(oidcConfig.tenantId.get()))
+ .get();
}
}
return null;
diff --git a/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/OpenTelemetryLegacyConfigurationTest.java b/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/OpenTelemetryLegacyConfigurationTest.java
index 775b2157341ef..3067e5df896b1 100644
--- a/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/OpenTelemetryLegacyConfigurationTest.java
+++ b/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/OpenTelemetryLegacyConfigurationTest.java
@@ -57,7 +57,7 @@ void config() {
assertEquals(TRUE, oTelRuntimeConfig.traces().includeStaticResources());
assertEquals("always_off", oTelBuildConfig.traces().sampler());
assertTrue(oTelRuntimeConfig.traces().samplerArg().isPresent());
- assertEquals(2.0d, oTelRuntimeConfig.traces().samplerArg().get());
+ assertEquals("2.0d", oTelRuntimeConfig.traces().samplerArg().get());
assertEquals(FALSE, otlpExporterBuildConfig.enabled());
assertTrue(otlpExporterRuntimeConfig.traces().legacyEndpoint().isPresent());
assertTrue(otlpExporterRuntimeConfig.traces().headers().isPresent());
diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/TracesRuntimeConfig.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/TracesRuntimeConfig.java
index b1dc033c51bd2..07b6abb8eaca5 100644
--- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/TracesRuntimeConfig.java
+++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/runtime/TracesRuntimeConfig.java
@@ -39,12 +39,16 @@ public interface TracesRuntimeConfig {
Boolean includeStaticResources();
/**
- * An argument to the configured tracer if supported, for example a ratio.
+ * Sampler argument. Depends on the `quarkus.otel.traces.sampler` property.
+ * Falls back to the legacy property quarkus.opentelemetry.tracer.sampler.ratio.
*
- * Fallbacks to the legacy property quarkus.opentelemetry.tracer.sampler.ratio
- * or defaults to `1.0`.
+ * When setting the stock sampler to `traceidratio` or `parentbased_traceidratio`, set a `double`-compatible
+ * value between `0.0d` and `1.0d`, such as `0.01d` or `0.5d`. The value is kept as a `String` to allow flexible
+ * customisation of alternative samplers.
+ *
+ * Defaults to `1.0d`.
*/
@WithName("sampler.arg")
@WithDefault("1.0d")
- Optional<Double> samplerArg();
+ Optional<String> samplerArg();
}
diff --git a/extensions/reactive-pg-client/runtime/src/main/java/io/quarkus/reactive/pg/client/runtime/DataSourceReactivePostgreSQLConfig.java b/extensions/reactive-pg-client/runtime/src/main/java/io/quarkus/reactive/pg/client/runtime/DataSourceReactivePostgreSQLConfig.java
index 681e4d0e6b3af..46c5d640e719c 100644
--- a/extensions/reactive-pg-client/runtime/src/main/java/io/quarkus/reactive/pg/client/runtime/DataSourceReactivePostgreSQLConfig.java
+++ b/extensions/reactive-pg-client/runtime/src/main/java/io/quarkus/reactive/pg/client/runtime/DataSourceReactivePostgreSQLConfig.java
@@ -5,6 +5,7 @@
import io.quarkus.runtime.annotations.ConfigDocDefault;
import io.quarkus.runtime.annotations.ConfigGroup;
+import io.smallrye.config.WithDefault;
import io.vertx.pgclient.SslMode;
@ConfigGroup
@@ -23,4 +24,13 @@ public interface DataSourceReactivePostgreSQLConfig {
*/
@ConfigDocDefault("disable")
Optional<SslMode> sslMode();
+
+ /**
+ * Level 7 proxies can load balance queries on several connections to the actual database.
+ * When that happens, the client can be confused by the lack of session affinity, and unwanted errors such as
+ * ERROR: unnamed prepared statement does not exist (26000) can occur.
+ * See "Using a level 7 proxy" in the Vert.x PostgreSQL client documentation.
+ */
+ @WithDefault("false")
+ boolean useLayer7Proxy();
}
diff --git a/extensions/reactive-pg-client/runtime/src/main/java/io/quarkus/reactive/pg/client/runtime/PgPoolRecorder.java b/extensions/reactive-pg-client/runtime/src/main/java/io/quarkus/reactive/pg/client/runtime/PgPoolRecorder.java
index 053c68a56708f..18149f6e67b1a 100644
--- a/extensions/reactive-pg-client/runtime/src/main/java/io/quarkus/reactive/pg/client/runtime/PgPoolRecorder.java
+++ b/extensions/reactive-pg-client/runtime/src/main/java/io/quarkus/reactive/pg/client/runtime/PgPoolRecorder.java
@@ -207,6 +207,8 @@ private List<PgConnectOptions> toPgConnectOptions(String dataSourceName, DataSou
}
}
+ pgConnectOptions.setUseLayer7Proxy(dataSourceReactivePostgreSQLConfig.useLayer7Proxy());
+
pgConnectOptions.setTrustAll(dataSourceReactiveRuntimeConfig.trustAll());
configurePemTrustOptions(pgConnectOptions, dataSourceReactiveRuntimeConfig.trustCertificatePem());
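The new option is simply forwarded to the underlying Vert.x client, as the recorder change above shows. A hedged sketch of the equivalent programmatic Vert.x configuration; the connection values are illustrative and the fluent setters are assumed from the Vert.x 4.x PgConnectOptions API:

import io.vertx.pgclient.PgConnectOptions;

public class Layer7ProxyOptionsSketch {

    static PgConnectOptions options() {
        return new PgConnectOptions()
                .setHost("localhost")       // illustrative connection settings
                .setPort(5432)
                .setDatabase("quarkus_db")
                .setUser("quarkus")
                .setPassword("quarkus")
                // tell the client that a level 7 proxy may spread queries over several
                // database connections, avoiding errors such as
                // "unnamed prepared statement does not exist (26000)"
                .setUseLayer7Proxy(true);
    }
}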
diff --git a/extensions/resteasy-reactive/rest-client-reactive-jackson/runtime/src/main/java/io/quarkus/rest/client/reactive/jackson/runtime/serialisers/ClientJacksonMessageBodyReader.java b/extensions/resteasy-reactive/rest-client-reactive-jackson/runtime/src/main/java/io/quarkus/rest/client/reactive/jackson/runtime/serialisers/ClientJacksonMessageBodyReader.java
index 63c4fb8cec20c..6ba0cb7f386c7 100644
--- a/extensions/resteasy-reactive/rest-client-reactive-jackson/runtime/src/main/java/io/quarkus/rest/client/reactive/jackson/runtime/serialisers/ClientJacksonMessageBodyReader.java
+++ b/extensions/resteasy-reactive/rest-client-reactive-jackson/runtime/src/main/java/io/quarkus/rest/client/reactive/jackson/runtime/serialisers/ClientJacksonMessageBodyReader.java
@@ -33,7 +33,6 @@ public class ClientJacksonMessageBodyReader extends JacksonBasicMessageBodyReade
private static final Logger log = Logger.getLogger(ClientJacksonMessageBodyReader.class);
- private final ConcurrentMap contextResolverMap = new ConcurrentHashMap<>();
private final ConcurrentMap objectReaderMap = new ConcurrentHashMap<>();
private RestClientRequestContext context;
@@ -49,7 +48,7 @@ public Object readFrom(Class