diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index b8f4262ca5ce8..8ee03c3d999c4 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -214,7 +214,7 @@ If your changes affect only the documentation, run:
 ```sh
 ./gradlew -p docs check
 ```
-For more information about testing code examples in the documentation, see
+For more information about testing code examples in the documentation, see
 https://github.com/elastic/elasticsearch/blob/master/docs/README.asciidoc

 ### Project layout
@@ -305,6 +305,39 @@ the `qa` subdirectory functions just like the top level `qa` subdirectory. The
 Elasticsearch process. The `transport-client` subdirectory contains extensions
 to Elasticsearch's standard transport client to work properly with x-pack.

+### Gradle Build
+
+We use Gradle to build Elasticsearch because it is flexible enough to not only
+build and package Elasticsearch, but also orchestrate all of the ways that we
+have to test Elasticsearch.
+
+#### Configurations
+
+Gradle organizes dependencies and build artifacts into "configurations" and
+allows you to use these configurations arbitrarily. Here are some of the most
+common configurations in our build and how we use them, with a short example
+after the list:
+
+<dl>
+<dt>`compile`</dt><dd>Code that is on the classpath at both compile and
+runtime. If the [`shadow`][shadow-plugin] plugin is applied to the project then
+this code is bundled into the jar produced by the project.</dd>
+<dt>`runtime`</dt><dd>Code that is not on the classpath at compile time but is
+on the classpath at runtime. We mostly use this configuration to make sure that
+we do not accidentally compile against dependencies of our dependencies, also
+known as "transitive" dependencies.</dd>
+<dt>`compileOnly`</dt><dd>Code that is on the classpath at compile time but that
+should not be shipped with the project because it is "provided" by the runtime
+somehow. Elasticsearch plugins use this configuration to include dependencies
+that are bundled with Elasticsearch's server.</dd>
+<dt>`shadow`</dt><dd>Only available in projects with the shadow plugin. Code
+that is on the classpath at both compile and runtime but is *not* bundled into
+the jar produced by the project. If you depend on a project with the `shadow`
+plugin then you need to depend on this configuration because it will bring
+along all of the dependencies you need at runtime.</dd>
+<dt>`testCompile`</dt><dd>Code that is on the classpath for compiling tests
+that are part of this project but not production code. The canonical example
+of this is `junit`.</dd>
+</dl>
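+
+For illustration only, here is a minimal sketch of how a project might declare
+dependencies in each of these configurations (the coordinates below are made-up
+examples, not real Elasticsearch modules):
+
+```gradle
+dependencies {
+  // on the compile and runtime classpath; bundled when the shadow plugin is applied
+  compile "org.example:some-library:1.0"
+  // provided by the Elasticsearch server at runtime, so not shipped with the project
+  compileOnly "org.example:provided-library:1.0"
+  // depend on a shadowed project through its `shadow` configuration so its
+  // runtime dependencies come along as well
+  compile project(path: ':example-shadowed-project', configuration: 'shadow')
+  // only on the classpath for this project's tests
+  testCompile "junit:junit:4.12"
+}
+```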
Contributing as part of a class
-------------------------------
@@ -337,3 +370,4 @@ repeating in this section because it has come up in this context.

 [eclipse]: http://www.eclipse.org/community/eclipse_newsletter/2017/june/
 [intellij]: https://blog.jetbrains.com/idea/2017/07/intellij-idea-2017-2-is-here-smart-sleek-and-snappy/
+[shadow-plugin]: https://github.com/johnrengelman/shadow
diff --git a/build.gradle b/build.gradle
index 66f34d8f445de..8df89cb064ce4 100644
--- a/build.gradle
+++ b/build.gradle
@@ -516,6 +516,31 @@ allprojects {
   tasks.eclipse.dependsOn(cleanEclipse, copyEclipseSettings)
 }

+allprojects {
+  /*
+   * IntelliJ and Eclipse don't know about the shadow plugin so when we're
+   * in "IntelliJ mode" or "Eclipse mode" add "runtime" dependencies
+   * everywhere we see a "shadow" dependency, which will cause them to
+   * reference shadowed projects directly rather than rely on the shadowing
+   * to include them. This is the correct thing for it to do because it
+   * doesn't run the jar shadowing at all. This isn't needed for the project
+   * itself because the IDE configuration is done by SourceSets but it *is*
+   * needed for projects that depend on the project doing the shadowing.
+   * Without this they won't properly depend on the shadowed project.
+   */
+  if (isEclipse || isIdea) {
+    configurations.all { Configuration configuration ->
+      dependencies.all { Dependency dep ->
+        if (dep instanceof ProjectDependency) {
+          if (dep.getTargetConfiguration() == 'shadow') {
+            configuration.dependencies.add(project.dependencies.project(path: dep.dependencyProject.path, configuration: 'runtime'))
+          }
+        }
+      }
+    }
+  }
+}
+
 // we need to add the same --debug-jvm option as
 // the real RunTask has, so we can pass it through
 class Run extends DefaultTask {
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
index 5da5912dabe51..94a8bf26177b3 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
@@ -131,6 +131,9 @@ class BuildPlugin implements Plugin<Project> {
       runtimeJavaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(project, runtimeJavaHome))
     }

+    String inFipsJvmScript = 'print(java.security.Security.getProviders()[0].name.toLowerCase().contains("fips"));'
+    boolean inFipsJvm = Boolean.parseBoolean(runJavascript(project, runtimeJavaHome, inFipsJvmScript))
+
     // Build debugging info
     println '======================================='
     println 'Elasticsearch Build Hamster says Hello!'
@@ -202,6 +205,7 @@ class BuildPlugin implements Plugin { project.rootProject.ext.buildChecksDone = true project.rootProject.ext.minimumCompilerVersion = minimumCompilerVersion project.rootProject.ext.minimumRuntimeVersion = minimumRuntimeVersion + project.rootProject.ext.inFipsJvm = inFipsJvm } project.targetCompatibility = project.rootProject.ext.minimumRuntimeVersion @@ -213,6 +217,7 @@ class BuildPlugin implements Plugin { project.ext.compilerJavaVersion = project.rootProject.ext.compilerJavaVersion project.ext.runtimeJavaVersion = project.rootProject.ext.runtimeJavaVersion project.ext.javaVersions = project.rootProject.ext.javaVersions + project.ext.inFipsJvm = project.rootProject.ext.inFipsJvm } private static String findCompilerJavaHome() { @@ -386,6 +391,9 @@ class BuildPlugin implements Plugin { project.configurations.compile.dependencies.all(disableTransitiveDeps) project.configurations.testCompile.dependencies.all(disableTransitiveDeps) project.configurations.compileOnly.dependencies.all(disableTransitiveDeps) + project.plugins.withType(ShadowPlugin).whenPluginAdded { + project.configurations.shadow.dependencies.all(disableTransitiveDeps) + } } /** Adds repositories used by ES dependencies */ @@ -770,7 +778,11 @@ class BuildPlugin implements Plugin { systemProperty property.getKey(), property.getValue() } } - + // Set the system keystore/truststore password if we're running tests in a FIPS-140 JVM + if (project.inFipsJvm) { + systemProperty 'javax.net.ssl.trustStorePassword', 'password' + systemProperty 'javax.net.ssl.keyStorePassword', 'password' + } boolean assertionsEnabled = Boolean.parseBoolean(System.getProperty('tests.asserts', 'true')) enableSystemAssertions assertionsEnabled enableAssertions assertionsEnabled @@ -873,11 +885,20 @@ class BuildPlugin implements Plugin { project.dependencyLicenses.dependencies = project.configurations.runtime.fileCollection { it.group.startsWith('org.elasticsearch') == false } - project.configurations.compileOnly + project.plugins.withType(ShadowPlugin).whenPluginAdded { + project.dependencyLicenses.dependencies += project.configurations.shadow.fileCollection { + it.group.startsWith('org.elasticsearch') == false + } + } } private static configureDependenciesInfo(Project project) { Task deps = project.tasks.create("dependenciesInfo", DependenciesInfoTask.class) deps.runtimeConfiguration = project.configurations.runtime + project.plugins.withType(ShadowPlugin).whenPluginAdded { + deps.runtimeConfiguration = project.configurations.create('infoDeps') + deps.runtimeConfiguration.extendsFrom(project.configurations.runtime, project.configurations.shadow) + } deps.compileOnlyConfiguration = project.configurations.compileOnly project.afterEvaluate { deps.mappings = project.dependencyLicenses.mappings diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 7f6f337e8a906..00f178fda9c9f 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -48,18 +48,6 @@ public class PluginBuildPlugin extends BuildPlugin { @Override public void apply(Project project) { super.apply(project) - project.plugins.withType(ShadowPlugin).whenPluginAdded { - /* - * We've not tested these plugins together and we're fairly sure - * they aren't going to work properly as is *and* we're not really - * sure *why* you'd want 
to shade stuff in plugins. So we throw an - * exception here to make you come and read this comment. If you - * have a need for shadow while building plugins then know that you - * are probably going to have to fight with gradle for a while.... - */ - throw new InvalidUserDataException('elasticsearch.esplugin is not ' - + 'compatible with com.github.johnrengelman.shadow'); - } configureDependencies(project) // this afterEvaluate must happen before the afterEvaluate added by integTest creation, // so that the file name resolution for installing the plugin will be setup @@ -153,8 +141,13 @@ public class PluginBuildPlugin extends BuildPlugin { include(buildProperties.descriptorOutput.name) } from pluginMetadata // metadata (eg custom security policy) - from project.jar // this plugin's jar - from project.configurations.runtime - project.configurations.compileOnly // the dep jars + /* + * If the plugin is using the shadow plugin then we need to bundle + * "shadow" things rather than the default jar and dependencies so + * we don't hit jar hell. + */ + from { project.plugins.hasPlugin(ShadowPlugin) ? project.shadowJar : project.jar } + from { project.plugins.hasPlugin(ShadowPlugin) ? project.configurations.shadow : project.configurations.runtime - project.configurations.compileOnly } // extra files for the plugin to go into the zip from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging from('src/main') { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java new file mode 100644 index 0000000000000..587578f3b35e1 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.protocol.xpack.license.PutLicenseRequest; +import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; + +import java.io.IOException; + +import static java.util.Collections.emptySet; + +/** + * A wrapper for the {@link RestHighLevelClient} that provides methods for + * accessing the Elastic License-related methods + *
+ * <p>
+ * See the
+ * X-Pack Licensing APIs on elastic.co for more information.
+ */
+public class LicenseClient {
+
+    private final RestHighLevelClient restHighLevelClient;
+
+    LicenseClient(RestHighLevelClient restHighLevelClient) {
+        this.restHighLevelClient = restHighLevelClient;
+    }
+
+    /**
+     * Updates the license for the cluster.
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public PutLicenseResponse putLicense(PutLicenseRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::putLicense, options,
+            PutLicenseResponse::fromXContent, emptySet());
+    }
+
+    /**
+     * Asynchronously updates the license for the cluster.
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public void putLicenseAsync(PutLicenseRequest request, RequestOptions options, ActionListener<PutLicenseResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::putLicense, options,
+            PutLicenseResponse::fromXContent, listener, emptySet());
+    }
+
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index a6122b0681e91..b9360877dfcad 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -40,6 +40,7 @@
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
@@ -108,6 +109,7 @@
 import org.elasticsearch.protocol.xpack.XPackInfoRequest;
 import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
 import org.elasticsearch.protocol.xpack.XPackUsageRequest;
+import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;
 import org.elasticsearch.rest.action.search.RestSearchAction;
 import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
 import org.elasticsearch.script.mustache.SearchTemplateRequest;
@@ -980,6 +982,20 @@ static Request snapshotsStatus(SnapshotsStatusRequest snapshotsStatusRequest) {
         return request;
     }

+    static Request restoreSnapshot(RestoreSnapshotRequest restoreSnapshotRequest) throws IOException {
+        String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
+            .addPathPart(restoreSnapshotRequest.repository())
+            .addPathPart(restoreSnapshotRequest.snapshot())
+            .addPathPartAsIs("_restore")
+            .build();
+        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
+        Params parameters = new Params(request);
+        parameters.withMasterTimeout(restoreSnapshotRequest.masterNodeTimeout());
parameters.withWaitForCompletion(restoreSnapshotRequest.waitForCompletion()); + request.setEntity(createEntity(restoreSnapshotRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) { String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot") .addPathPart(deleteSnapshotRequest.repository()) @@ -1124,6 +1140,18 @@ static Request xpackUsage(XPackUsageRequest usageRequest) { return request; } + static Request putLicense(PutLicenseRequest putLicenseRequest) { + Request request = new Request(HttpPut.METHOD_NAME, "/_xpack/license"); + Params parameters = new Params(request); + parameters.withTimeout(putLicenseRequest.timeout()); + parameters.withMasterTimeout(putLicenseRequest.masterNodeTimeout()); + if (putLicenseRequest.isAcknowledge()) { + parameters.putParam("acknowledge", "true"); + } + request.setJsonEntity(putLicenseRequest.getLicenseDefinition()); + return request; + } + private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef(); return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java index ae115839baeaf..319eb96a9f899 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java @@ -30,6 +30,8 @@ import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; @@ -252,6 +254,36 @@ public void statusAsync(SnapshotsStatusRequest snapshotsStatusRequest, RequestOp SnapshotsStatusResponse::fromXContent, listener, emptySet()); } + /** + * Restores a snapshot. + * See Snapshot and Restore + * API on elastic.co + * + * @param restoreSnapshotRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public RestoreSnapshotResponse restore(RestoreSnapshotRequest restoreSnapshotRequest, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(restoreSnapshotRequest, RequestConverters::restoreSnapshot, options, + RestoreSnapshotResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously restores a snapshot. + * See Snapshot and Restore + * API on elastic.co + * + * @param restoreSnapshotRequest the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void restoreAsync(RestoreSnapshotRequest restoreSnapshotRequest, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(restoreSnapshotRequest, RequestConverters::restoreSnapshot, options, + RestoreSnapshotResponse::fromXContent, listener, emptySet()); + } + /** * Deletes a snapshot. * See Snapshot and Restore diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java index 4acaadfdb85d5..1401376527df2 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java @@ -42,10 +42,12 @@ public final class XPackClient { private final RestHighLevelClient restHighLevelClient; private final WatcherClient watcherClient; + private final LicenseClient licenseClient; XPackClient(RestHighLevelClient restHighLevelClient) { this.restHighLevelClient = restHighLevelClient; this.watcherClient = new WatcherClient(restHighLevelClient); + this.licenseClient = new LicenseClient(restHighLevelClient); } public WatcherClient watcher() { @@ -100,4 +102,15 @@ public void usageAsync(XPackUsageRequest request, RequestOptions options, Action restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xpackUsage, options, XPackUsageResponse::fromXContent, listener, emptySet()); } + + /** + * A wrapper for the {@link RestHighLevelClient} that provides methods for + * accessing the Elastic Licensing APIs. + *
+ * <p>
+ * See the + * X-Pack APIs on elastic.co for more information. + */ + public LicenseClient license() { + return licenseClient; + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java index 2890257b236cd..0af270cb051ea 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java @@ -22,7 +22,11 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.rankeval.DiscountedCumulativeGain; import org.elasticsearch.index.rankeval.EvalQueryQuality; +import org.elasticsearch.index.rankeval.EvaluationMetric; +import org.elasticsearch.index.rankeval.ExpectedReciprocalRank; +import org.elasticsearch.index.rankeval.MeanReciprocalRank; import org.elasticsearch.index.rankeval.PrecisionAtK; import org.elasticsearch.index.rankeval.RankEvalRequest; import org.elasticsearch.index.rankeval.RankEvalResponse; @@ -35,8 +39,10 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -64,15 +70,7 @@ public void indexDocuments() throws IOException { * calculation where all unlabeled documents are treated as not relevant. */ public void testRankEvalRequest() throws IOException { - SearchSourceBuilder testQuery = new SearchSourceBuilder(); - testQuery.query(new MatchAllQueryBuilder()); - List amsterdamRatedDocs = createRelevant("index" , "amsterdam1", "amsterdam2", "amsterdam3", "amsterdam4"); - amsterdamRatedDocs.addAll(createRelevant("index2", "amsterdam0")); - RatedRequest amsterdamRequest = new RatedRequest("amsterdam_query", amsterdamRatedDocs, testQuery); - RatedRequest berlinRequest = new RatedRequest("berlin_query", createRelevant("index", "berlin"), testQuery); - List specifications = new ArrayList<>(); - specifications.add(amsterdamRequest); - specifications.add(berlinRequest); + List specifications = createTestEvaluationSpec(); PrecisionAtK metric = new PrecisionAtK(1, false, 10); RankEvalSpec spec = new RankEvalSpec(specifications, metric); @@ -80,7 +78,7 @@ public void testRankEvalRequest() throws IOException { RankEvalResponse response = execute(rankEvalRequest, highLevelClient()::rankEval, highLevelClient()::rankEvalAsync); // the expected Prec@ for the first query is 5/7 and the expected Prec@ for the second is 1/7, divided by 2 to get the average double expectedPrecision = (1.0 / 7.0 + 5.0 / 7.0) / 2.0; - assertEquals(expectedPrecision, response.getEvaluationResult(), Double.MIN_VALUE); + assertEquals(expectedPrecision, response.getMetricScore(), Double.MIN_VALUE); Map partialResults = response.getPartialResults(); assertEquals(2, partialResults.size()); EvalQueryQuality amsterdamQueryQuality = partialResults.get("amsterdam_query"); @@ -114,6 +112,38 @@ public void testRankEvalRequest() throws IOException { response = execute(rankEvalRequest, highLevelClient()::rankEval, highLevelClient()::rankEvalAsync); } + private static List createTestEvaluationSpec() { + SearchSourceBuilder testQuery = new SearchSourceBuilder(); + testQuery.query(new MatchAllQueryBuilder()); + List amsterdamRatedDocs = createRelevant("index" , "amsterdam1", "amsterdam2", 
"amsterdam3", "amsterdam4"); + amsterdamRatedDocs.addAll(createRelevant("index2", "amsterdam0")); + RatedRequest amsterdamRequest = new RatedRequest("amsterdam_query", amsterdamRatedDocs, testQuery); + RatedRequest berlinRequest = new RatedRequest("berlin_query", createRelevant("index", "berlin"), testQuery); + List specifications = new ArrayList<>(); + specifications.add(amsterdamRequest); + specifications.add(berlinRequest); + return specifications; + } + + /** + * Test case checks that the default metrics are registered and usable + */ + public void testMetrics() throws IOException { + List specifications = createTestEvaluationSpec(); + List> metrics = Arrays.asList(PrecisionAtK::new, MeanReciprocalRank::new, DiscountedCumulativeGain::new, + () -> new ExpectedReciprocalRank(1)); + double expectedScores[] = new double[] {0.4285714285714286, 0.75, 1.6408962261063627, 0.4407738095238095}; + int i = 0; + for (Supplier metricSupplier : metrics) { + RankEvalSpec spec = new RankEvalSpec(specifications, metricSupplier.get()); + + RankEvalRequest rankEvalRequest = new RankEvalRequest(spec, new String[] { "index", "index2" }); + RankEvalResponse response = execute(rankEvalRequest, highLevelClient()::rankEval, highLevelClient()::rankEvalAsync); + assertEquals(expectedScores[i], response.getMetricScore(), Double.MIN_VALUE); + i++; + } + } + private static List createRelevant(String indexName, String... docs) { return Stream.of(docs).map(s -> new RatedDocument(indexName, s, 1)).collect(Collectors.toList()); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index c1f47feb33d5a..0415d363c54cd 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -41,6 +41,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; @@ -2198,6 +2199,31 @@ public void testSnapshotsStatus() { assertThat(request.getEntity(), is(nullValue())); } + public void testRestoreSnapshot() throws IOException { + Map expectedParams = new HashMap<>(); + String repository = randomIndicesNames(1, 1)[0]; + String snapshot = "snapshot-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT); + String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s/_restore", repository, snapshot); + + RestoreSnapshotRequest restoreSnapshotRequest = new RestoreSnapshotRequest(repository, snapshot); + setRandomMasterTimeout(restoreSnapshotRequest, expectedParams); + if (randomBoolean()) { + restoreSnapshotRequest.waitForCompletion(true); + expectedParams.put("wait_for_completion", "true"); + } + if (randomBoolean()) { + String timeout = randomTimeValue(); + restoreSnapshotRequest.masterNodeTimeout(timeout); + expectedParams.put("master_timeout", timeout); + } + + Request request = 
RequestConverters.restoreSnapshot(restoreSnapshotRequest); + assertThat(endpoint, equalTo(request.getEndpoint())); + assertThat(HttpPost.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + assertToXContentBody(restoreSnapshotRequest, request.getEntity()); + } + public void testDeleteSnapshot() { Map expectedParams = new HashMap<>(); String repository = randomIndicesNames(1, 1)[0]; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 64a344790caa0..5cf3b35275620 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.client; import com.fasterxml.jackson.core.JsonParseException; + import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; @@ -60,6 +61,7 @@ import org.elasticsearch.common.xcontent.smile.SmileXContent; import org.elasticsearch.index.rankeval.DiscountedCumulativeGain; import org.elasticsearch.index.rankeval.EvaluationMetric; +import org.elasticsearch.index.rankeval.ExpectedReciprocalRank; import org.elasticsearch.index.rankeval.MeanReciprocalRank; import org.elasticsearch.index.rankeval.MetricDetail; import org.elasticsearch.index.rankeval.PrecisionAtK; @@ -616,7 +618,7 @@ public void testDefaultNamedXContents() { public void testProvidedNamedXContents() { List namedXContents = RestHighLevelClient.getProvidedNamedXContents(); - assertEquals(8, namedXContents.size()); + assertEquals(10, namedXContents.size()); Map, Integer> categories = new HashMap<>(); List names = new ArrayList<>(); for (NamedXContentRegistry.Entry namedXContent : namedXContents) { @@ -630,14 +632,16 @@ public void testProvidedNamedXContents() { assertEquals(Integer.valueOf(2), categories.get(Aggregation.class)); assertTrue(names.contains(ChildrenAggregationBuilder.NAME)); assertTrue(names.contains(MatrixStatsAggregationBuilder.NAME)); - assertEquals(Integer.valueOf(3), categories.get(EvaluationMetric.class)); + assertEquals(Integer.valueOf(4), categories.get(EvaluationMetric.class)); assertTrue(names.contains(PrecisionAtK.NAME)); assertTrue(names.contains(DiscountedCumulativeGain.NAME)); assertTrue(names.contains(MeanReciprocalRank.NAME)); - assertEquals(Integer.valueOf(3), categories.get(MetricDetail.class)); + assertTrue(names.contains(ExpectedReciprocalRank.NAME)); + assertEquals(Integer.valueOf(4), categories.get(MetricDetail.class)); assertTrue(names.contains(PrecisionAtK.NAME)); assertTrue(names.contains(MeanReciprocalRank.NAME)); assertTrue(names.contains(DiscountedCumulativeGain.NAME)); + assertTrue(names.contains(ExpectedReciprocalRank.NAME)); } public void testApiNamingConventions() throws Exception { @@ -661,7 +665,6 @@ public void testApiNamingConventions() throws Exception { "reindex_rethrottle", "render_search_template", "scripts_painless_execute", - "snapshot.restore", "tasks.get", "termvectors", "update_by_query" diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java index 5483f055c2c12..06aec70a01884 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java +++ 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java @@ -28,6 +28,8 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; import org.elasticsearch.common.settings.Settings; @@ -40,12 +42,15 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.snapshots.RestoreInfo; import java.io.IOException; +import java.util.Collections; import java.util.stream.Collectors; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; public class SnapshotIT extends ESRestHighLevelClientTestCase { @@ -205,6 +210,42 @@ public void testSnapshotsStatus() throws IOException { assertThat(response.getSnapshots().get(0).getIndices().containsKey(testIndex), is(true)); } + public void testRestoreSnapshot() throws IOException { + String testRepository = "test"; + String testSnapshot = "snapshot_1"; + String testIndex = "test_index"; + String restoredIndex = testIndex + "_restored"; + + PutRepositoryResponse putRepositoryResponse = createTestRepository(testRepository, FsRepository.TYPE, "{\"location\": \".\"}"); + assertTrue(putRepositoryResponse.isAcknowledged()); + + createIndex(testIndex, Settings.EMPTY); + assertTrue("index [" + testIndex + "] should have been created", indexExists(testIndex)); + + CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(testRepository, testSnapshot); + createSnapshotRequest.indices(testIndex); + createSnapshotRequest.waitForCompletion(true); + CreateSnapshotResponse createSnapshotResponse = createTestSnapshot(createSnapshotRequest); + assertEquals(RestStatus.OK, createSnapshotResponse.status()); + + deleteIndex(testIndex); + assertFalse("index [" + testIndex + "] should have been deleted", indexExists(testIndex)); + + RestoreSnapshotRequest request = new RestoreSnapshotRequest(testRepository, testSnapshot); + request.waitForCompletion(true); + request.renamePattern(testIndex); + request.renameReplacement(restoredIndex); + + RestoreSnapshotResponse response = execute(request, highLevelClient().snapshot()::restore, + highLevelClient().snapshot()::restoreAsync); + + RestoreInfo restoreInfo = response.getRestoreInfo(); + assertThat(restoreInfo.name(), equalTo(testSnapshot)); + assertThat(restoreInfo.indices(), equalTo(Collections.singletonList(restoredIndex))); + assertThat(restoreInfo.successfulShards(), greaterThan(0)); + assertThat(restoreInfo.failedShards(), equalTo(0)); + } + public void testDeleteSnapshot() throws IOException { String repository = "test_repository"; String snapshot = "test_snapshot"; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/LicensingDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/LicensingDocumentationIT.java new file 
mode 100644 index 0000000000000..d620adb71312b --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/LicensingDocumentationIT.java @@ -0,0 +1,106 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.documentation; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.protocol.xpack.license.LicensesStatus; +import org.elasticsearch.protocol.xpack.license.PutLicenseRequest; +import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; + +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.startsWith; + +/** + * Documentation for Licensing APIs in the high level java client. + * Code wrapped in {@code tag} and {@code end} tags is included in the docs. 
+ */ +public class LicensingDocumentationIT extends ESRestHighLevelClientTestCase { + + public void testPutLicense() throws Exception { + RestHighLevelClient client = highLevelClient(); + String license = "{\"license\": {\"uid\":\"893361dc-9749-4997-93cb-802e3d7fa4a8\",\"type\":\"gold\"," + + "\"issue_date_in_millis\":1411948800000,\"expiry_date_in_millis\":1914278399999,\"max_nodes\":1,\"issued_to\":\"issued_to\"," + + "\"issuer\":\"issuer\",\"signature\":\"AAAAAgAAAA3U8+YmnvwC+CWsV/mRAAABmC9ZN0hjZDBGYnVyRXpCOW5Bb3FjZDAxOWpSbTVoMVZwUzRxVk1PSm" + + "kxakxZdW5IMlhlTHNoN1N2MXMvRFk4d3JTZEx3R3RRZ0pzU3lobWJKZnQvSEFva0ppTHBkWkprZWZSQi9iNmRQNkw1SlpLN0lDalZCS095MXRGN1lIZlpYcVVTTn" + + "FrcTE2dzhJZmZrdFQrN3JQeGwxb0U0MXZ0dDJHSERiZTVLOHNzSDByWnpoZEphZHBEZjUrTVBxRENNSXNsWWJjZllaODdzVmEzUjNiWktNWGM5TUhQV2plaUo4Q1" + + "JOUml4MXNuL0pSOEhQaVB2azhmUk9QVzhFeTFoM1Q0RnJXSG53MWk2K055c28zSmRnVkF1b2JSQkFLV2VXUmVHNDZ2R3o2VE1qbVNQS2lxOHN5bUErZlNIWkZSVm" + + "ZIWEtaSU9wTTJENDVvT1NCYklacUYyK2FwRW9xa0t6dldMbmMzSGtQc3FWOTgzZ3ZUcXMvQkt2RUZwMFJnZzlvL2d2bDRWUzh6UG5pdENGWFRreXNKNkE9PQAAAQ" + + "Be8GfzDm6T537Iuuvjetb3xK5dvg0K5NQapv+rczWcQFxgCuzbF8plkgetP1aAGZP4uRESDQPMlOCsx4d0UqqAm9f7GbBQ3l93P+PogInPFeEH9NvOmaAQovmxVM" + + "9SE6DsDqlX4cXSO+bgWpXPTd2LmpoQc1fXd6BZ8GeuyYpVHVKp9hVU0tAYjw6HzYOE7+zuO1oJYOxElqy66AnIfkvHrvni+flym3tE7tDTgsDRaz7W3iBhaqiSnt" + + "EqabEkvHdPHQdSR99XGaEvnHO1paK01/35iZF6OXHsF7CCj+558GRXiVxzueOe7TsGSSt8g7YjZwV9bRCyU7oB4B/nidgI\"}}"; + { + //tag::put-license-execute + PutLicenseRequest request = new PutLicenseRequest(); + request.setLicenseDefinition(license); // <1> + request.setAcknowledge(false); // <2> + + PutLicenseResponse response = client.xpack().license().putLicense(request, RequestOptions.DEFAULT); + //end::put-license-execute + + //tag::put-license-response + LicensesStatus status = response.status(); // <1> + assertEquals(status, LicensesStatus.VALID); // <2> + boolean acknowledged = response.isAcknowledged(); // <3> + String acknowledgeHeader = response.acknowledgeHeader(); // <4> + Map acknowledgeMessages = response.acknowledgeMessages(); // <5> + //end::put-license-response + + assertFalse(acknowledged); // Should fail because we are trying to downgrade from platinum trial to gold + assertThat(acknowledgeHeader, startsWith("This license update requires acknowledgement.")); + assertThat(acknowledgeMessages.keySet(), not(hasSize(0))); + } + { + PutLicenseRequest request = new PutLicenseRequest(); + // tag::put-license-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(PutLicenseResponse indexResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::put-license-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::put-license-execute-async + client.xpack().license().putLicenseAsync( + request, RequestOptions.DEFAULT, listener); // <1> + // end::put-license-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index c60f2d4c92b87..2f743c786bab8 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -1136,14 +1136,14 @@ public void testRankEval() throws Exception { // end::rank-eval-execute // tag::rank-eval-response - double evaluationResult = response.getEvaluationResult(); // <1> + double evaluationResult = response.getMetricScore(); // <1> assertEquals(1.0 / 3.0, evaluationResult, 0.0); Map partialResults = response.getPartialResults(); EvalQueryQuality evalQuality = partialResults.get("kimchy_query"); // <2> assertEquals("kimchy_query", evalQuality.getId()); - double qualityLevel = evalQuality.getQualityLevel(); // <3> + double qualityLevel = evalQuality.metricScore(); // <3> assertEquals(1.0 / 3.0, qualityLevel, 0.0); List hitsAndRatings = evalQuality.getHitsAndRatings(); RatedSearchHit ratedSearchHit = hitsAndRatings.get(2); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java index fff3e7ece7066..922fcb984d94a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java @@ -33,6 +33,8 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; @@ -53,12 +55,15 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.snapshots.RestoreInfo; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotShardFailure; import org.elasticsearch.snapshots.SnapshotState; import java.io.IOException; +import java.util.Collections; +import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Locale; @@ -263,6 +268,107 @@ public void onFailure(Exception e) { } } + public void testRestoreSnapshot() throws IOException { + RestHighLevelClient client = highLevelClient(); + + createTestRepositories(); + createTestIndex(); + createTestSnapshots(); + + // tag::restore-snapshot-request + RestoreSnapshotRequest request = new RestoreSnapshotRequest(repositoryName, snapshotName); + // end::restore-snapshot-request + // we need to restore as a different index name + + // tag::restore-snapshot-request-masterTimeout + request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1> + request.masterNodeTimeout("1m"); // <2> + // end::restore-snapshot-request-masterTimeout + + // tag::restore-snapshot-request-waitForCompletion + request.waitForCompletion(true); // <1> + // end::restore-snapshot-request-waitForCompletion + + // tag::restore-snapshot-request-partial + request.partial(false); // <1> + // end::restore-snapshot-request-partial 
+ + // tag::restore-snapshot-request-include-global-state + request.includeGlobalState(false); // <1> + // end::restore-snapshot-request-include-global-state + + // tag::restore-snapshot-request-include-aliases + request.includeAliases(false); // <1> + // end::restore-snapshot-request-include-aliases + + + // tag::restore-snapshot-request-indices + request.indices("test_index"); + // end::restore-snapshot-request-indices + + String restoredIndexName = "restored_index"; + // tag::restore-snapshot-request-rename + request.renamePattern("test_(.+)"); // <1> + request.renameReplacement("restored_$1"); // <2> + // end::restore-snapshot-request-rename + + // tag::restore-snapshot-request-index-settings + request.indexSettings( // <1> + Settings.builder() + .put("index.number_of_replicas", 0) + .build()); + + request.ignoreIndexSettings("index.refresh_interval", "index.search.idle.after"); // <2> + request.indicesOptions(new IndicesOptions( // <3> + EnumSet.of(IndicesOptions.Option.IGNORE_UNAVAILABLE), + EnumSet.of(IndicesOptions.WildcardStates.OPEN))); + // end::restore-snapshot-request-index-settings + + // tag::restore-snapshot-execute + RestoreSnapshotResponse response = client.snapshot().restore(request, RequestOptions.DEFAULT); + // end::restore-snapshot-execute + + // tag::restore-snapshot-response + RestoreInfo restoreInfo = response.getRestoreInfo(); + List indices = restoreInfo.indices(); // <1> + // end::restore-snapshot-response + assertEquals(Collections.singletonList(restoredIndexName), indices); + assertEquals(0, restoreInfo.failedShards()); + assertTrue(restoreInfo.successfulShards() > 0); + } + + public void testRestoreSnapshotAsync() throws InterruptedException { + RestHighLevelClient client = highLevelClient(); + { + RestoreSnapshotRequest request = new RestoreSnapshotRequest(); + + // tag::restore-snapshot-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(RestoreSnapshotResponse restoreSnapshotResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::restore-snapshot-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::restore-snapshot-execute-async + client.snapshot().restoreAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::restore-snapshot-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testSnapshotDeleteRepository() throws IOException { RestHighLevelClient client = highLevelClient(); diff --git a/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java b/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java index 7f9bcc6ea0818..7bde7fbc06f6b 100644 --- a/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java +++ b/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java @@ -21,6 +21,7 @@ import io.netty.util.ThreadDeathWatcher; import io.netty.util.concurrent.GlobalEventExecutor; + import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.network.NetworkModule; diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index fcd69138da328..4fdaa5c1a013c 100644 --- a/distribution/packages/build.gradle 
+++ b/distribution/packages/build.gradle
@@ -125,32 +125,22 @@ Closure commonPackageConfig(String type, boolean oss) {
       fileMode 0644
     }
     into('lib') {
-      with copySpec {
-        with libFiles(oss)
-        // we need to specify every intermediate directory so we iterate through the parents; duplicate calls with the same part are fine
-        eachFile { FileCopyDetails fcp ->
-          String[] segments = fcp.relativePath.segments
-          for (int i = segments.length - 2; i > 0 && segments[i] != 'lib'; --i) {
-            directory('/' + segments[0..i].join('/'), 0755)
-          }
-          fcp.mode = 0644
-        }
-      }
+      with libFiles(oss)
     }
     into('modules') {
-      with copySpec {
-        with modulesFiles(oss)
-        // we need to specify every intermediate directory so we iterate through the parents; duplicate calls with the same part are fine
-        eachFile { FileCopyDetails fcp ->
-          String[] segments = fcp.relativePath.segments
-          for (int i = segments.length - 2; i > 0 && segments[i] != 'modules'; --i) {
-            directory('/' + segments[0..i].join('/'), 0755)
-          }
-          if (segments[-2] == 'bin') {
-            fcp.mode = 0755
-          } else {
-            fcp.mode = 0644
-          }
+      with modulesFiles(oss)
+    }
+    // we need to specify every intermediate directory in these paths so the package managers know they are explicitly
+    // intended to manage them; otherwise they may be left behind on uninstallation. duplicate calls of the same
+    // directory are fine
+    eachFile { FileCopyDetails fcp ->
+      String[] segments = fcp.relativePath.segments
+      for (int i = segments.length - 2; i > 2; --i) {
+        directory('/' + segments[0..i].join('/'), 0755)
+        if (segments[-2] == 'bin') {
+          fcp.mode = 0755
+        } else {
+          fcp.mode = 0644
+        }
       }
     }
@@ -333,12 +323,6 @@ Closure commonRpmConfig(boolean oss) {
     // without this the rpm will have parent dirs of any files we copy in, eg /etc/elasticsearch
     addParentDirs false
-
-    // Declare the folders so that the RPM package manager removes
-    // them when upgrading or removing the package
-    directory('/usr/share/elasticsearch/bin', 0755)
-    directory('/usr/share/elasticsearch/lib', 0755)
-    directory('/usr/share/elasticsearch/modules', 0755)
   }
 }
diff --git a/docs/build.gradle b/docs/build.gradle
index 829db4381b046..a67c0217490b3 100644
--- a/docs/build.gradle
+++ b/docs/build.gradle
@@ -379,9 +379,9 @@ buildRestTests.setups['exams'] = '''
         refresh: true
         body: |
           {"index":{}}
-          {"grade": 100}
+          {"grade": 100, "weight": 2}
           {"index":{}}
-          {"grade": 50}'''
+          {"grade": 50, "weight": 3}'''

 buildRestTests.setups['stored_example_script'] = '''
 # Simple script to load a field. Not really a good example, but a simple one.
diff --git a/docs/java-rest/high-level/licensing/put-license.asciidoc b/docs/java-rest/high-level/licensing/put-license.asciidoc
new file mode 100644
index 0000000000000..7456fdd3c0236
--- /dev/null
+++ b/docs/java-rest/high-level/licensing/put-license.asciidoc
@@ -0,0 +1,66 @@
+[[java-rest-high-put-license]]
+=== Update License

+[[java-rest-high-put-license-execution]]
+==== Execution

+The license can be added or updated using the `putLicense()` method:

+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/LicensingDocumentationIT.java[put-license-execute]
+--------------------------------------------------
+<1> Set the license definition, a JSON document containing the license information.
+<2> Whether the license update should be acknowledged up front; defaults to `false`.
+
+[[java-rest-high-put-license-response]]
+==== Response

+The returned `PutLicenseResponse` contains the `LicensesStatus`,
+the `acknowledged` flag and possible acknowledge messages. The acknowledge messages
+are present if you previously had a license with more features than the one you
+are trying to install and you didn't set the `acknowledge` flag to `true`. In this case
+you need to display the messages to the end user and, if they agree, resubmit the
+license with the `acknowledge` flag set to `true`. Please note that the request will
+still return a `200` response code even if it requires an acknowledgement. So, it is
+necessary to check the `acknowledged` flag.

+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/LicensingDocumentationIT.java[put-license-response]
+--------------------------------------------------
+<1> The status of the license
+<2> Make sure that the license is valid.
+<3> Check the acknowledge flag.
+<4> It should be `true` if the license update was acknowledged.
+<5> Otherwise we can see the acknowledge header in `acknowledgeHeader()` and check
+component-specific messages in `acknowledgeMessages()`.

+[[java-rest-high-put-license-async]]
+==== Asynchronous Execution

+This request can be executed asynchronously:

+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/LicensingDocumentationIT.java[put-license-execute-async]
+--------------------------------------------------
+<1> The `PutLicenseRequest` to execute and the `ActionListener` to use when
+the execution completes

+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.

+A typical listener for `PutLicenseResponse` looks like:

+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/LicensingDocumentationIT.java[put-license-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of failure. The raised exception is provided as an argument
diff --git a/docs/java-rest/high-level/snapshot/restore_snapshot.asciidoc b/docs/java-rest/high-level/snapshot/restore_snapshot.asciidoc
new file mode 100644
index 0000000000000..a4b83ca419a41
--- /dev/null
+++ b/docs/java-rest/high-level/snapshot/restore_snapshot.asciidoc
@@ -0,0 +1,144 @@
+[[java-rest-high-snapshot-restore-snapshot]]
+=== Restore Snapshot API

+The Restore Snapshot API allows you to restore a snapshot.

+[[java-rest-high-snapshot-restore-snapshot-request]]
+==== Restore Snapshot Request

+A `RestoreSnapshotRequest`:

+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[restore-snapshot-request]
+--------------------------------------------------

+==== Limiting Indices to Restore

+By default all indices are restored.
With the `indices` property you can +provide a list of indices that should be restored: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[restore-snapshot-request-indices] +-------------------------------------------------- +<1> Request that Elasticsearch only restores "test_index". + +==== Renaming Indices + +You can rename indices using regular expressions when restoring a snapshot: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[restore-snapshot-request-rename] +-------------------------------------------------- +<1> A regular expression matching the indices that should be renamed. +<2> A replacement pattern that references the group from the regular + expression as `$1`. "test_index" from the snapshot is restored as + "restored_index" in this example. + +==== Index Settings and Options + +You can also customize index settings and options when restoring: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[restore-snapshot-request-index-settings] +-------------------------------------------------- +<1> Use `#indexSettings()` to set any specific index setting for the indices + that are restored. +<2> Use `#ignoreIndexSettings()` to provide index settings that should be + ignored from the original indices. +<3> Set `IndicesOptions.Option.IGNORE_UNAVAILABLE` in `#indicesOptions()` to + have the restore succeed even if indices are missing in the snapshot. + +==== Further Arguments + +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[restore-snapshot-request-masterTimeout] +-------------------------------------------------- +<1> Timeout to connect to the master node as a `TimeValue` +<2> Timeout to connect to the master node as a `String` + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[restore-snapshot-request-waitForCompletion] +-------------------------------------------------- +<1> Boolean indicating whether to wait until the snapshot has been restored. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[restore-snapshot-request-partial] +-------------------------------------------------- +<1> Boolean indicating whether the entire snapshot should succeed although one + or more indices participating in the snapshot don’t have all primary + shards available. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[restore-snapshot-request-include-global-state] +-------------------------------------------------- +<1> Boolean indicating whether restored templates that don’t currently exist + in the cluster are added and existing templates with the same name are + replaced by the restored templates. The restored persistent settings are + added to the existing persistent settings. 
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[restore-snapshot-request-include-aliases]
+--------------------------------------------------
+<1> Boolean to control whether aliases should be restored. Set to `false` to
+    prevent aliases from being restored together with associated indices.
+
+[[java-rest-high-snapshot-restore-snapshot-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[restore-snapshot-execute]
+--------------------------------------------------
+
+[[java-rest-high-snapshot-restore-snapshot-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of a restore snapshot request requires both the
+`RestoreSnapshotRequest` instance and an `ActionListener` instance to be
+passed to the asynchronous method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[restore-snapshot-execute-async]
+--------------------------------------------------
+<1> The `RestoreSnapshotRequest` to execute and the `ActionListener`
+to use when the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed, the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for `RestoreSnapshotResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[restore-snapshot-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+    provided as an argument.
+<2> Called in case of a failure. The raised exception is provided as an argument.
+
+[[java-rest-high-cluster-restore-snapshot-response]]
+==== Restore Snapshot Response
+
+The returned `RestoreSnapshotResponse` allows you to retrieve information about
+the executed operation as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[restore-snapshot-response]
+--------------------------------------------------
+<1> The `RestoreInfo` contains details about the restored snapshot, such as the
+    indices or the number of successfully restored and failed shards.
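+
+For readers who prefer a literal snippet over the tagged includes above, here
+is a minimal sketch of the whole flow. It is not taken from the test suite:
+the repository and snapshot names are made up, the client is assumed to be
+already initialized, and the exact client method signatures may differ between
+versions:
+
+["source","java"]
+--------------------------------------------------
+// Assumes an already-initialized RestHighLevelClient named `client`.
+RestoreSnapshotRequest request =
+    new RestoreSnapshotRequest("my_repository", "my_snapshot"); // hypothetical names
+request.indices("test_index");            // restore only this index
+request.renamePattern("test_(.+)");       // rename matching indices ...
+request.renameReplacement("restored_$1"); // ... to "restored_index"
+request.waitForCompletion(true);          // block until the restore finishes
+
+RestoreSnapshotResponse response =
+    client.snapshot().restore(request, RequestOptions.DEFAULT);
+// RestoreInfo is only non-null because waitForCompletion was set to true
+RestoreInfo restoreInfo = response.getRestoreInfo();
+assert restoreInfo.failedShards() == 0;   // all shards restored successfully
+--------------------------------------------------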
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index d952870677b7e..25fbcaaaeaa73 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -186,3 +186,12 @@ The Java High Level REST Client supports the following Scripts APIs:
 
 include::script/get_script.asciidoc[]
 include::script/delete_script.asciidoc[]
+
+
+== Licensing APIs
+
+The Java High Level REST Client supports the following Licensing APIs:
+
+* <<java-rest-high-put-license>>
+
+include::licensing/put-license.asciidoc[]
diff --git a/docs/plugins/analysis-phonetic.asciidoc b/docs/plugins/analysis-phonetic.asciidoc
index 34f4a933f9f76..a75c21fdac658 100644
--- a/docs/plugins/analysis-phonetic.asciidoc
+++ b/docs/plugins/analysis-phonetic.asciidoc
@@ -17,15 +17,15 @@ The `phonetic` token filter takes the following settings:
 `encoder`::
 
     Which phonetic encoder to use. Accepts `metaphone` (default),
-    `doublemetaphone`, `soundex`, `refinedsoundex`, `caverphone1`,
+    `double_metaphone`, `soundex`, `refined_soundex`, `caverphone1`,
     `caverphone2`, `cologne`, `nysiis`, `koelnerphonetik`, `haasephonetik`,
-    `beidermorse`, `daitch_mokotoff`.
+    `beider_morse`, `daitch_mokotoff`.
 
 `replace`::
 
     Whether or not the original token should be replaced by the phonetic
     token. Accepts `true` (default) and `false`. Not supported by
-    `beidermorse` encoding.
+    `beider_morse` encoding.
 
 [source,js]
 --------------------------------------------------
diff --git a/docs/reference/aggregations/metrics.asciidoc b/docs/reference/aggregations/metrics.asciidoc
index ae6bee2eb7d17..96597564dac2d 100644
--- a/docs/reference/aggregations/metrics.asciidoc
+++ b/docs/reference/aggregations/metrics.asciidoc
@@ -13,6 +13,8 @@ bucket aggregations (some bucket aggregations enable you to sort the returned bu
 
 include::metrics/avg-aggregation.asciidoc[]
 
+include::metrics/weighted-avg-aggregation.asciidoc[]
+
 include::metrics/cardinality-aggregation.asciidoc[]
 
 include::metrics/extendedstats-aggregation.asciidoc[]
diff --git a/docs/reference/aggregations/metrics/weighted-avg-aggregation.asciidoc b/docs/reference/aggregations/metrics/weighted-avg-aggregation.asciidoc
new file mode 100644
index 0000000000000..252728a6db367
--- /dev/null
+++ b/docs/reference/aggregations/metrics/weighted-avg-aggregation.asciidoc
@@ -0,0 +1,202 @@
+[[search-aggregations-metrics-weight-avg-aggregation]]
+=== Weighted Avg Aggregation
+
+A `single-value` metrics aggregation that computes the weighted average of numeric values that are extracted from the aggregated documents.
+These values can be extracted either from specific numeric fields in the documents, or provided by a script.
+
+When calculating a regular average, each datapoint has an equal "weight" ... it contributes equally to the final value. Weighted averages,
+on the other hand, weight each datapoint differently. The amount that each datapoint contributes to the final value is extracted from the
+document, or provided by a script.
+
+As a formula, a weighted average is `∑(value * weight) / ∑(weight)`.
+
+A regular average can be thought of as a weighted average where every value has an implicit weight of `1`.
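+
+To make the formula concrete, here is a tiny plain-Java sketch of the same
+computation. The values and weights are made-up illustration data, not part of
+the aggregation's implementation:
+
+["source","java"]
+--------------------------------------------------
+// Weighted average: sum(value * weight) / sum(weight)
+double[] values  = {1, 2, 3};  // hypothetical "grade" values
+double[] weights = {3, 2, 1};  // hypothetical per-value weights
+
+double weightedSum = 0;
+double weightSum = 0;
+for (int i = 0; i < values.length; i++) {
+    weightedSum += values[i] * weights[i];
+    weightSum += weights[i];
+}
+
+// (1*3 + 2*2 + 3*1) / (3 + 2 + 1) = 10 / 6 ≈ 1.67
+double weightedAvg = weightedSum / weightSum;
+--------------------------------------------------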
+
+.`weighted_avg` Parameters
+|===
+|Parameter Name |Description |Required |Default Value
+|`value` | The configuration for the field or script that provides the values |Required |
+|`weight` | The configuration for the field or script that provides the weights |Required |
+|`format` | The numeric response formatter |Optional |
+|`value_type` | A hint about the values for pure scripts or unmapped fields |Optional |
+|===
+
+The `value` and `weight` objects have per-field specific configuration:
+
+.`value` Parameters
+|===
+|Parameter Name |Description |Required |Default Value
+|`field` | The field that values should be extracted from |Required |
+|`missing` | A value to use if the field is missing entirely |Optional |
+|`script` | A script which provides the values for the document. This is mutually exclusive with `field` |Optional |
+|===
+
+.`weight` Parameters
+|===
+|Parameter Name |Description |Required |Default Value
+|`field` | The field that weights should be extracted from |Required |
+|`missing` | A weight to use if the field is missing entirely |Optional |
+|`script` | A script which provides the weights for the document. This is mutually exclusive with `field` |Optional |
+|===
+
+
+==== Examples
+
+If our documents have a `"grade"` field that holds a 0-100 numeric score, and a `"weight"` field which holds an arbitrary numeric weight,
+we can calculate the weighted average using:
+
+[source,js]
+--------------------------------------------------
+POST /exams/_search
+{
+    "size": 0,
+    "aggs" : {
+        "weighted_grade": {
+            "weighted_avg": {
+                "value": {
+                    "field": "grade"
+                },
+                "weight": {
+                    "field": "weight"
+                }
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:exams]
+
+Which yields a response like:
+
+[source,js]
+--------------------------------------------------
+{
+    ...
+    "aggregations": {
+        "weighted_grade": {
+            "value": 70.0
+        }
+    }
+}
+--------------------------------------------------
+// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/]
+
+
+While multiple values-per-field are allowed, only one weight is allowed. If the aggregation encounters
+a document that has more than one weight (e.g. the weight field is a multi-valued field), it will throw an exception.
+If you find yourself in this situation, you will need to specify a `script` for the weight field, and use the script
+to combine the multiple values into the single weight to be used.
+
+This single weight will be applied independently to each value extracted from the `value` field.
+
+This example shows how a single document with multiple values will be averaged with a single weight:
+
+[source,js]
+--------------------------------------------------
+POST /exams/_doc?refresh
+{
+    "grade": [1, 2, 3],
+    "weight": 2
+}
+
+POST /exams/_search
+{
+    "size": 0,
+    "aggs" : {
+        "weighted_grade": {
+            "weighted_avg": {
+                "value": {
+                    "field": "grade"
+                },
+                "weight": {
+                    "field": "weight"
+                }
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST
+
+The three values (`1`, `2`, and `3`) will be included as independent values, all with the weight of `2`:
+
+[source,js]
+--------------------------------------------------
+{
+    ...
+    "aggregations": {
+        "weighted_grade": {
+            "value": 2.0
+        }
+    }
+}
+--------------------------------------------------
+// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/]
+
+The aggregation returns `2.0` as the result, which matches what we would expect when calculating by hand:
+`((1*2) + (2*2) + (3*2)) / (2+2+2) == 2`
+
+==== Script
+
+Both the value and the weight can be derived from a script instead of a field. As a simple example, the following
+will add one to the grade and weight in the document using a script:
+
+[source,js]
+--------------------------------------------------
+POST /exams/_search
+{
+    "size": 0,
+    "aggs" : {
+        "weighted_grade": {
+            "weighted_avg": {
+                "value": {
+                    "script": "doc.grade.value + 1"
+                },
+                "weight": {
+                    "script": "doc.weight.value + 1"
+                }
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:exams]
+
+
+==== Missing values
+
+The `missing` parameter defines how documents that are missing a value should be treated.
+The default behavior is different for `value` and `weight`:
+
+By default, if the `value` field is missing, the document is ignored and the aggregation moves on to the next document.
+If the `weight` field is missing, it is assumed to have a weight of `1` (like a normal average).
+
+Both of these defaults can be overridden with the `missing` parameter:
+
+[source,js]
+--------------------------------------------------
+POST /exams/_search
+{
+    "size": 0,
+    "aggs" : {
+        "weighted_grade": {
+            "weighted_avg": {
+                "value": {
+                    "field": "grade",
+                    "missing": 2
+                },
+                "weight": {
+                    "field": "weight",
+                    "missing": 3
+                }
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:exams]
+
diff --git a/docs/reference/search/rank-eval.asciidoc b/docs/reference/search/rank-eval.asciidoc
index cf13b9f7b0655..81c464b71d575 100644
--- a/docs/reference/search/rank-eval.asciidoc
+++ b/docs/reference/search/rank-eval.asciidoc
@@ -259,6 +259,56 @@ in the query. Defaults to 10.
 |`normalize` | If set to `true`, this metric will calculate the https://en.wikipedia.org/wiki/Discounted_cumulative_gain#Normalized_DCG[Normalized DCG].
 |=======================================================================
 
+[float]
+==== Expected Reciprocal Rank (ERR)
+
+Expected Reciprocal Rank (ERR) is an extension of the classical reciprocal rank for the graded relevance case
+(Olivier Chapelle, Donald Metzler, Ya Zhang, and Pierre Grinspan. 2009.
+http://olivier.chapelle.cc/pub/err.pdf[Expected reciprocal rank for graded relevance].)
+
+It is based on the assumption of a cascade model of search, in which a user scans through ranked search
+results in order and stops at the first document that satisfies the information need. For this reason, it
+is a good metric for question answering and navigation queries, but less so for survey-oriented information
+needs where the user is interested in finding many relevant documents in the top k results.
+
+The metric models the expectation of the reciprocal of the position at which a user stops reading through
+the result list. This means that a relevant document in a top ranking position will contribute much to the
+overall score. However, the same document will contribute much less to the score if it appears at a lower rank,
+even more so if there are some relevant (but maybe less relevant) documents preceding it.
+In this way, the ERR metric discounts documents which are shown after very relevant documents.
+This introduces a notion of dependency in the ordering of relevant documents that e.g. Precision or DCG don't account for.
+
+[source,js]
+--------------------------------
+GET /twitter/_rank_eval
+{
+    "requests": [
+    {
+        "id": "JFK query",
+        "request": { "query": { "match_all": {}}},
+        "ratings": []
+    }],
+    "metric": {
+       "expected_reciprocal_rank": {
+            "maximum_relevance" : 3,
+            "k" : 20
+       }
+    }
+}
+--------------------------------
+// CONSOLE
+// TEST[setup:twitter]
+
+The `expected_reciprocal_rank` metric takes the following parameters:
+
+[cols="<,<",options="header",]
+|=======================================================================
+|Parameter |Description
+| `maximum_relevance` | Mandatory parameter. The highest relevance grade used in the user-supplied
+relevance judgments.
+|`k` | Sets the maximum number of documents retrieved per query. This value will act in place of the usual `size` parameter
+in the query. Defaults to 10.
+|=======================================================================
+
 [float]
 === Response format
 
@@ -270,10 +320,10 @@ that shows potential errors of individual queries. The response has the followin
 --------------------------------
 {
   "rank_eval": {
-    "quality_level": 0.4, <1>
+    "metric_score": 0.4, <1>
     "details": {
       "my_query_id1": { <2>
-        "quality_level": 0.6, <3>
+        "metric_score": 0.6, <3>
         "unrated_docs": [ <4>
           {
             "_index": "my_index",
@@ -308,7 +358,7 @@ that shows potential errors of individual queries. The response has the followin
 <1> the overall evaluation quality calculated by the defined metric
 <2> the `details` section contains one entry for every query in the original `requests` section, keyed by the search request id
-<3> the `quality_level` in the `details` section shows the contribution of this query to the global quality score
+<3> the `metric_score` in the `details` section shows the contribution of this query to the global quality metric score
 <4> the `unrated_docs` section contains an `_index` and `_id` entry for each document in the search result for
 this query that didn't have a ratings value.
This can be used to ask the user to supply ratings for these documents <5> the `hits` section shows a grouping of the search results with their supplied rating diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/ChannelFactoryTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/ChannelFactoryTests.java index 8ff0cfcd0c876..af4eabefd94ef 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/ChannelFactoryTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/ChannelFactoryTests.java @@ -137,7 +137,6 @@ private static class TestChannelFactory extends ChannelFactory { + extends ArrayValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = "matrix_stats"; private MultiValueMode multiValueMode = MultiValueMode.AVG; diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java index 578116d7b5eb2..aa19f62fedc4f 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java @@ -30,7 +30,7 @@ import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.metrics.MetricsAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.support.MultiValuesSource.NumericMultiValuesSource; +import org.elasticsearch.search.aggregations.support.ArrayValuesSource.NumericArrayValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.internal.SearchContext; @@ -43,7 +43,7 @@ **/ final class MatrixStatsAggregator extends MetricsAggregator { /** Multiple ValuesSource with field names */ - private final NumericMultiValuesSource valuesSources; + private final NumericArrayValuesSource valuesSources; /** array of descriptive stats, per shard, needed to compute the correlation */ ObjectArray stats; @@ -53,7 +53,7 @@ final class MatrixStatsAggregator extends MetricsAggregator { Map metaData) throws IOException { super(name, context, parent, pipelineAggregators, metaData); if (valuesSources != null && !valuesSources.isEmpty()) { - this.valuesSources = new NumericMultiValuesSource(valuesSources, multiValueMode); + this.valuesSources = new NumericArrayValuesSource(valuesSources, multiValueMode); stats = context.bigArrays().newObjectArray(1); } else { this.valuesSources = null; diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorFactory.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorFactory.java index 2c3ac82a0c1a8..fb456d75bb78b 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorFactory.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorFactory.java @@ -23,7 +23,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import 
org.elasticsearch.search.aggregations.support.MultiValuesSourceAggregatorFactory; +import org.elasticsearch.search.aggregations.support.ArrayValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.internal.SearchContext; @@ -33,7 +33,7 @@ import java.util.Map; final class MatrixStatsAggregatorFactory - extends MultiValuesSourceAggregatorFactory { + extends ArrayValuesSourceAggregatorFactory { private final MultiValueMode multiValueMode; diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsParser.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsParser.java index fd13037e8f922..0f48d1855ae3e 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsParser.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsParser.java @@ -21,14 +21,14 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.MultiValueMode; -import org.elasticsearch.search.aggregations.support.MultiValuesSourceParser.NumericValuesSourceParser; +import org.elasticsearch.search.aggregations.support.ArrayValuesSourceParser.NumericValuesSourceParser; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; import java.util.Map; -import static org.elasticsearch.search.aggregations.support.MultiValuesSourceAggregationBuilder.MULTIVALUE_MODE_FIELD; +import static org.elasticsearch.search.aggregations.support.ArrayValuesSourceAggregationBuilder.MULTIVALUE_MODE_FIELD; public class MatrixStatsParser extends NumericValuesSourceParser { diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSource.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSource.java similarity index 87% rename from modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSource.java rename to modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSource.java index 86d1836721f10..94bf68c7ae489 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSource.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSource.java @@ -28,13 +28,13 @@ /** * Class to encapsulate a set of ValuesSource objects labeled by field name */ -public abstract class MultiValuesSource { +public abstract class ArrayValuesSource { protected MultiValueMode multiValueMode; protected String[] names; protected VS[] values; - public static class NumericMultiValuesSource extends MultiValuesSource { - public NumericMultiValuesSource(Map valuesSources, MultiValueMode multiValueMode) { + public static class NumericArrayValuesSource extends ArrayValuesSource { + public NumericArrayValuesSource(Map valuesSources, MultiValueMode multiValueMode) { super(valuesSources, multiValueMode); if (valuesSources != null) { this.values = valuesSources.values().toArray(new ValuesSource.Numeric[0]); @@ -51,8 +51,8 @@ public NumericDoubleValues getField(final 
int ordinal, LeafReaderContext ctx) th } } - public static class BytesMultiValuesSource extends MultiValuesSource { - public BytesMultiValuesSource(Map valuesSources, MultiValueMode multiValueMode) { + public static class BytesArrayValuesSource extends ArrayValuesSource { + public BytesArrayValuesSource(Map valuesSources, MultiValueMode multiValueMode) { super(valuesSources, multiValueMode); this.values = valuesSources.values().toArray(new ValuesSource.Bytes[0]); } @@ -62,14 +62,14 @@ public Object getField(final int ordinal, LeafReaderContext ctx) throws IOExcept } } - public static class GeoPointValuesSource extends MultiValuesSource { + public static class GeoPointValuesSource extends ArrayValuesSource { public GeoPointValuesSource(Map valuesSources, MultiValueMode multiValueMode) { super(valuesSources, multiValueMode); this.values = valuesSources.values().toArray(new ValuesSource.GeoPoint[0]); } } - private MultiValuesSource(Map valuesSources, MultiValueMode multiValueMode) { + private ArrayValuesSource(Map valuesSources, MultiValueMode multiValueMode) { if (valuesSources != null) { this.names = valuesSources.keySet().toArray(new String[0]); } diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceAggregationBuilder.java similarity index 91% rename from modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java rename to modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceAggregationBuilder.java index 4cf497c9c02a5..eb8152e0fe0b8 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceAggregationBuilder.java @@ -44,13 +44,13 @@ import java.util.Map; import java.util.Objects; -public abstract class MultiValuesSourceAggregationBuilder> - extends AbstractAggregationBuilder { +public abstract class ArrayValuesSourceAggregationBuilder> + extends AbstractAggregationBuilder { public static final ParseField MULTIVALUE_MODE_FIELD = new ParseField("mode"); - public abstract static class LeafOnly> - extends MultiValuesSourceAggregationBuilder { + public abstract static class LeafOnly> + extends ArrayValuesSourceAggregationBuilder { protected LeafOnly(String name, ValuesSourceType valuesSourceType, ValueType targetValueType) { super(name, valuesSourceType, targetValueType); @@ -94,7 +94,7 @@ public AB subAggregations(Builder subFactories) { private Object missing = null; private Map missingMap = Collections.emptyMap(); - protected MultiValuesSourceAggregationBuilder(String name, ValuesSourceType valuesSourceType, ValueType targetValueType) { + protected ArrayValuesSourceAggregationBuilder(String name, ValuesSourceType valuesSourceType, ValueType targetValueType) { super(name); if (valuesSourceType == null) { throw new IllegalArgumentException("[valuesSourceType] must not be null: [" + name + "]"); @@ -103,7 +103,7 @@ protected MultiValuesSourceAggregationBuilder(String name, ValuesSourceType valu this.targetValueType = targetValueType; } - protected MultiValuesSourceAggregationBuilder(MultiValuesSourceAggregationBuilder clone, + protected 
ArrayValuesSourceAggregationBuilder(ArrayValuesSourceAggregationBuilder clone, Builder factoriesBuilder, Map metaData) { super(clone, factoriesBuilder, metaData); this.valuesSourceType = clone.valuesSourceType; @@ -115,7 +115,7 @@ protected MultiValuesSourceAggregationBuilder(MultiValuesSourceAggregationBuilde this.missing = clone.missing; } - protected MultiValuesSourceAggregationBuilder(StreamInput in, ValuesSourceType valuesSourceType, ValueType targetValueType) + protected ArrayValuesSourceAggregationBuilder(StreamInput in, ValuesSourceType valuesSourceType, ValueType targetValueType) throws IOException { super(in); assert false == serializeTargetValueType() : "Wrong read constructor called for subclass that provides its targetValueType"; @@ -124,7 +124,7 @@ protected MultiValuesSourceAggregationBuilder(StreamInput in, ValuesSourceType v read(in); } - protected MultiValuesSourceAggregationBuilder(StreamInput in, ValuesSourceType valuesSourceType) throws IOException { + protected ArrayValuesSourceAggregationBuilder(StreamInput in, ValuesSourceType valuesSourceType) throws IOException { super(in); assert serializeTargetValueType() : "Wrong read constructor called for subclass that serializes its targetValueType"; this.valuesSourceType = valuesSourceType; @@ -239,10 +239,10 @@ public Map missingMap() { } @Override - protected final MultiValuesSourceAggregatorFactory doBuild(SearchContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException { + protected final ArrayValuesSourceAggregatorFactory doBuild(SearchContext context, AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder) throws IOException { Map> configs = resolveConfig(context); - MultiValuesSourceAggregatorFactory factory = innerBuild(context, configs, parent, subFactoriesBuilder); + ArrayValuesSourceAggregatorFactory factory = innerBuild(context, configs, parent, subFactoriesBuilder); return factory; } @@ -255,9 +255,10 @@ protected Map> resolveConfig(SearchContext contex return configs; } - protected abstract MultiValuesSourceAggregatorFactory innerBuild(SearchContext context, - Map> configs, AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder) throws IOException; + protected abstract ArrayValuesSourceAggregatorFactory innerBuild(SearchContext context, + Map> configs, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder) throws IOException; public ValuesSourceConfig config(SearchContext context, String field, Script script) { @@ -355,14 +356,14 @@ public final XContentBuilder internalXContent(XContentBuilder builder, Params pa @Override protected final int doHashCode() { return Objects.hash(fields, format, missing, targetValueType, valueType, valuesSourceType, - innerHashCode()); + innerHashCode()); } protected abstract int innerHashCode(); @Override protected final boolean doEquals(Object obj) { - MultiValuesSourceAggregationBuilder other = (MultiValuesSourceAggregationBuilder) obj; + ArrayValuesSourceAggregationBuilder other = (ArrayValuesSourceAggregationBuilder) obj; if (!Objects.equals(fields, other.fields)) return false; if (!Objects.equals(format, other.format)) diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregatorFactory.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceAggregatorFactory.java similarity index 78% rename from 
modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregatorFactory.java rename to modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceAggregatorFactory.java index 7d5c56a571bbe..ce8eeecd19036 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregatorFactory.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceAggregatorFactory.java @@ -30,14 +30,15 @@ import java.util.List; import java.util.Map; -public abstract class MultiValuesSourceAggregatorFactory> - extends AggregatorFactory { +public abstract class ArrayValuesSourceAggregatorFactory> + extends AggregatorFactory { protected Map> configs; - public MultiValuesSourceAggregatorFactory(String name, Map> configs, - SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, - Map metaData) throws IOException { + public ArrayValuesSourceAggregatorFactory(String name, Map> configs, + SearchContext context, AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metaData) throws IOException { super(name, context, parent, subFactoriesBuilder, metaData); this.configs = configs; } @@ -63,6 +64,7 @@ protected abstract Aggregator createUnmapped(Aggregator parent, List metaData) throws IOException; protected abstract Aggregator doCreateInternal(Map valuesSources, Aggregator parent, boolean collectsFromSingleBucket, - List pipelineAggregators, Map metaData) throws IOException; + List pipelineAggregators, + Map metaData) throws IOException; } diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParser.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceParser.java similarity index 86% rename from modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParser.java rename to modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceParser.java index 22a90b552d920..1100884cf8ace 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParser.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/ArrayValuesSourceParser.java @@ -33,30 +33,30 @@ import java.util.List; import java.util.Map; -public abstract class MultiValuesSourceParser implements Aggregator.Parser { +public abstract class ArrayValuesSourceParser implements Aggregator.Parser { - public abstract static class AnyValuesSourceParser extends MultiValuesSourceParser { + public abstract static class AnyValuesSourceParser extends ArrayValuesSourceParser { protected AnyValuesSourceParser(boolean formattable) { super(formattable, ValuesSourceType.ANY, null); } } - public abstract static class NumericValuesSourceParser extends MultiValuesSourceParser { + public abstract static class NumericValuesSourceParser extends ArrayValuesSourceParser { protected NumericValuesSourceParser(boolean formattable) { super(formattable, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } } - public abstract static class BytesValuesSourceParser extends MultiValuesSourceParser { + public abstract static class BytesValuesSourceParser extends ArrayValuesSourceParser { protected BytesValuesSourceParser(boolean formattable) { 
super(formattable, ValuesSourceType.BYTES, ValueType.STRING); } } - public abstract static class GeoPointValuesSourceParser extends MultiValuesSourceParser { + public abstract static class GeoPointValuesSourceParser extends ArrayValuesSourceParser { protected GeoPointValuesSourceParser(boolean formattable) { super(formattable, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT); @@ -67,15 +67,15 @@ protected GeoPointValuesSourceParser(boolean formattable) { private ValuesSourceType valuesSourceType = null; private ValueType targetValueType = null; - private MultiValuesSourceParser(boolean formattable, ValuesSourceType valuesSourceType, ValueType targetValueType) { + private ArrayValuesSourceParser(boolean formattable, ValuesSourceType valuesSourceType, ValueType targetValueType) { this.valuesSourceType = valuesSourceType; this.targetValueType = targetValueType; this.formattable = formattable; } @Override - public final MultiValuesSourceAggregationBuilder parse(String aggregationName, XContentParser parser) - throws IOException { + public final ArrayValuesSourceAggregationBuilder parse(String aggregationName, XContentParser parser) + throws IOException { List fields = null; ValueType valueType = null; @@ -98,7 +98,7 @@ private MultiValuesSourceParser(boolean formattable, ValuesSourceType valuesSour "Multi-field aggregations do not support scripts."); } else if (!token(aggregationName, currentFieldName, token, parser, otherOptions)) { throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); + "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); } } else if (token == XContentParser.Token.START_OBJECT) { if (CommonFields.MISSING.match(currentFieldName, parser.getDeprecationHandler())) { @@ -113,7 +113,7 @@ private MultiValuesSourceParser(boolean formattable, ValuesSourceType valuesSour } else if (!token(aggregationName, currentFieldName, token, parser, otherOptions)) { throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); + "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); } } else if (token == XContentParser.Token.START_ARRAY) { if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { @@ -127,21 +127,21 @@ private MultiValuesSourceParser(boolean formattable, ValuesSourceType valuesSour fields.add(parser.text()); } else { throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); + "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); } } } else if (!token(aggregationName, currentFieldName, token, parser, otherOptions)) { throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); + "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); } } else if (!token(aggregationName, currentFieldName, token, parser, otherOptions)) { throw new ParsingException(parser.getTokenLocation(), - "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); + "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]."); } } - MultiValuesSourceAggregationBuilder factory = createFactory(aggregationName, 
this.valuesSourceType, this.targetValueType, - otherOptions); + ArrayValuesSourceAggregationBuilder factory = createFactory(aggregationName, this.valuesSourceType, this.targetValueType, + otherOptions); if (fields != null) { factory.fields(fields); } @@ -182,7 +182,7 @@ private void parseMissingAndAdd(final String aggregationName, final String curre /** * Creates a {@link ValuesSourceAggregationBuilder} from the information * gathered by the subclass. Options parsed in - * {@link MultiValuesSourceParser} itself will be added to the factory + * {@link ArrayValuesSourceParser} itself will be added to the factory * after it has been returned by this method. * * @param aggregationName @@ -197,11 +197,13 @@ private void parseMissingAndAdd(final String aggregationName, final String curre * method * @return the created factory */ - protected abstract MultiValuesSourceAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions); + protected abstract ArrayValuesSourceAggregationBuilder createFactory(String aggregationName, + ValuesSourceType valuesSourceType, + ValueType targetValueType, + Map otherOptions); /** - * Allows subclasses of {@link MultiValuesSourceParser} to parse extra + * Allows subclasses of {@link ArrayValuesSourceParser} to parse extra * parameters and store them in a {@link Map} which will later be passed to * {@link #createFactory(String, ValuesSourceType, ValueType, Map)}. * diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java index 70e2172ce92d9..b5a348f45eb54 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java +++ b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java @@ -43,7 +43,6 @@ class MultiPassStats { this.fieldBKey = fieldBName; } - @SuppressWarnings("unchecked") void computeStats(final List fieldA, final List fieldB) { // set count count = fieldA.size(); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java index 264df6f4c5f24..2e881b82b59de 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java @@ -42,7 +42,11 @@ enum Type { @Override public Object convert(Object value) { try { - return Integer.parseInt(value.toString()); + String strValue = value.toString(); + if (strValue.startsWith("0x") || strValue.startsWith("-0x")) { + return Integer.decode(strValue); + } + return Integer.parseInt(strValue); } catch(NumberFormatException e) { throw new IllegalArgumentException("unable to convert [" + value + "] to integer", e); } @@ -52,7 +56,11 @@ public Object convert(Object value) { @Override public Object convert(Object value) { try { - return Long.parseLong(value.toString()); + String strValue = value.toString(); + if (strValue.startsWith("0x") || strValue.startsWith("-0x")) { + return Long.decode(strValue); + } + return Long.parseLong(strValue); } catch(NumberFormatException e) { throw new IllegalArgumentException("unable to convert [" + value + "] to long", e); } diff --git 
a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java index 292a03d7d9033..f0fc31dab3533 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java @@ -49,6 +49,33 @@ public void testConvertInt() throws Exception { assertThat(ingestDocument.getFieldValue(fieldName, Integer.class), equalTo(randomInt)); } + public void testConvertIntHex() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + int randomInt = randomInt(); + String intString = randomInt < 0 ? "-0x" + Integer.toHexString(-randomInt) : "0x" + Integer.toHexString(randomInt); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, intString); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.INTEGER, false); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue(fieldName, Integer.class), equalTo(randomInt)); + } + + public void testConvertIntLeadingZero() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "010"); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.INTEGER, false); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue(fieldName, Integer.class), equalTo(10)); + } + + public void testConvertIntHexError() { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String value = "0x" + randomAlphaOfLengthBetween(1, 10); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, value); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.INTEGER, false); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); + assertThat(e.getMessage(), equalTo("unable to convert [" + value + "] to integer")); + } + public void testConvertIntList() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); int numItems = randomIntBetween(1, 10); @@ -92,6 +119,33 @@ public void testConvertLong() throws Exception { assertThat(ingestDocument.getFieldValue(fieldName, Long.class), equalTo(randomLong)); } + public void testConvertLongHex() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + long randomLong = randomLong(); + String longString = randomLong < 0 ? 
"-0x" + Long.toHexString(-randomLong) : "0x" + Long.toHexString(randomLong); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, longString); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.LONG, false); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue(fieldName, Long.class), equalTo(randomLong)); + } + + public void testConvertLongLeadingZero() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "010"); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.LONG, false); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue(fieldName, Long.class), equalTo(10L)); + } + + public void testConvertLongHexError() { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String value = "0x" + randomAlphaOfLengthBetween(1, 10); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, value); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.LONG, false); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); + assertThat(e.getMessage(), equalTo("unable to convert [" + value + "] to long")); + } + public void testConvertLongList() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); int numItems = randomIntBetween(1, 10); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java index 2867ed1d24053..099e8e1866b8e 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java @@ -146,7 +146,6 @@ public void testFieldMissing() { assertThat(exception.getMessage(), equalTo("field [field] not present as part of path [field]")); } - @SuppressWarnings("unchecked") public void testAddToRoot() throws Exception { String processorTag = randomAlphaOfLength(3); String randomTargetField = randomAlphaOfLength(2); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index 752c0c205dd89..bcecd7bbdc78e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -30,30 +30,30 @@ public final class PainlessLookup { public Collection getStructs() { - return javaClassesToPainlessStructs.values(); + return classesToPainlessClasses.values(); } - private final Map> painlessTypesToJavaClasses; - private final Map, PainlessClass> javaClassesToPainlessStructs; + private final Map> canonicalClassNamesToClasses; + private final Map, PainlessClass> classesToPainlessClasses; - PainlessLookup(Map> painlessTypesToJavaClasses, Map, PainlessClass> javaClassesToPainlessStructs) { - this.painlessTypesToJavaClasses = Collections.unmodifiableMap(painlessTypesToJavaClasses); - this.javaClassesToPainlessStructs = 
Collections.unmodifiableMap(javaClassesToPainlessStructs); + PainlessLookup(Map> canonicalClassNamesToClasses, Map, PainlessClass> classesToPainlessClasses) { + this.canonicalClassNamesToClasses = Collections.unmodifiableMap(canonicalClassNamesToClasses); + this.classesToPainlessClasses = Collections.unmodifiableMap(classesToPainlessClasses); } public Class getClassFromBinaryName(String painlessType) { - return painlessTypesToJavaClasses.get(painlessType.replace('$', '.')); + return canonicalClassNamesToClasses.get(painlessType.replace('$', '.')); } public boolean isSimplePainlessType(String painlessType) { - return painlessTypesToJavaClasses.containsKey(painlessType); + return canonicalClassNamesToClasses.containsKey(painlessType); } public PainlessClass getPainlessStructFromJavaClass(Class clazz) { - return javaClassesToPainlessStructs.get(clazz); + return classesToPainlessClasses.get(clazz); } public Class getJavaClassFromPainlessType(String painlessType) { - return PainlessLookupUtility.canonicalTypeNameToType(painlessType, painlessTypesToJavaClasses); + return PainlessLookupUtility.canonicalTypeNameToType(painlessType, canonicalClassNamesToClasses); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index 06773d3ffddf9..b15f1f13f203a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -24,10 +24,12 @@ import org.elasticsearch.painless.spi.WhitelistConstructor; import org.elasticsearch.painless.spi.WhitelistField; import org.elasticsearch.painless.spi.WhitelistMethod; -import org.objectweb.asm.Type; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; +import java.lang.reflect.Constructor; +import java.lang.reflect.Field; +import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Collections; @@ -35,11 +37,15 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Stack; import java.util.regex.Pattern; -import static org.elasticsearch.painless.lookup.PainlessLookupUtility.DEF_TYPE_NAME; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.CONSTRUCTOR_NAME; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.DEF_CLASS_NAME; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessFieldKey; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessMethodKey; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToJavaType; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typesToCanonicalTypeNames; public class PainlessLookupBuilder { @@ -123,17 +129,17 @@ public int hashCode() { private final List whitelists; private final Map> canonicalClassNamesToClasses; - private final Map, PainlessClassBuilder> classesToPainlessClasses; + private final Map, PainlessClassBuilder> classesToPainlessClassBuilders; public PainlessLookupBuilder(List whitelists) { this.whitelists = whitelists; canonicalClassNamesToClasses = new HashMap<>(); - classesToPainlessClasses = new HashMap<>(); + classesToPainlessClassBuilders = new HashMap<>(); - 
canonicalClassNamesToClasses.put(DEF_TYPE_NAME, def.class); - classesToPainlessClasses.put(def.class, - new PainlessClassBuilder(DEF_TYPE_NAME, Object.class, Type.getType(Object.class))); + canonicalClassNamesToClasses.put(DEF_CLASS_NAME, def.class); + classesToPainlessClassBuilders.put(def.class, + new PainlessClassBuilder(DEF_CLASS_NAME, Object.class, org.objectweb.asm.Type.getType(Object.class))); } private Class canonicalTypeNameToType(String canonicalTypeName) { @@ -141,7 +147,7 @@ private Class canonicalTypeNameToType(String canonicalTypeName) { } private void validateType(Class type) { - PainlessLookupUtility.validateType(type, classesToPainlessClasses.keySet()); + PainlessLookupUtility.validateType(type, classesToPainlessClassBuilders.keySet()); } public void addPainlessClass(ClassLoader classLoader, String javaClassName, boolean importClassName) { @@ -174,10 +180,10 @@ public void addPainlessClass(Class clazz, boolean importClassName) { Objects.requireNonNull(clazz); if (clazz == def.class) { - throw new IllegalArgumentException("cannot add reserved class [" + DEF_TYPE_NAME + "]"); + throw new IllegalArgumentException("cannot add reserved class [" + DEF_CLASS_NAME + "]"); } - String canonicalClassName = clazz.getCanonicalName(); + String canonicalClassName = typeToCanonicalTypeName(clazz); if (clazz.isArray()) { throw new IllegalArgumentException("cannot add array type [" + canonicalClassName + "] as a class"); @@ -187,13 +193,14 @@ public void addPainlessClass(Class clazz, boolean importClassName) { throw new IllegalArgumentException("invalid class name [" + canonicalClassName + "]"); } - PainlessClassBuilder existingPainlessClassBuilder = classesToPainlessClasses.get(clazz); + PainlessClassBuilder existingPainlessClassBuilder = classesToPainlessClassBuilders.get(clazz); if (existingPainlessClassBuilder == null) { - PainlessClassBuilder painlessClassBuilder = new PainlessClassBuilder(canonicalClassName, clazz, Type.getType(clazz)); + PainlessClassBuilder painlessClassBuilder = + new PainlessClassBuilder(canonicalClassName, clazz, org.objectweb.asm.Type.getType(clazz)); canonicalClassNamesToClasses.put(canonicalClassName, clazz); - classesToPainlessClasses.put(clazz, painlessClassBuilder); + classesToPainlessClassBuilders.put(clazz, painlessClassBuilder); } else if (existingPainlessClassBuilder.clazz.equals(clazz) == false) { throw new IllegalArgumentException("class [" + canonicalClassName + "] " + "cannot represent multiple java classes with the same name from different class loaders"); @@ -207,477 +214,455 @@ public void addPainlessClass(Class clazz, boolean importClassName) { throw new IllegalArgumentException("must use only_fqn parameter on class [" + canonicalClassName + "] with no package"); } } else { - Class importedPainlessType = canonicalClassNamesToClasses.get(importedCanonicalClassName); + Class importedPainlessClass = canonicalClassNamesToClasses.get(importedCanonicalClassName); - if (importedPainlessType == null) { + if (importedPainlessClass == null) { if (importClassName) { if (existingPainlessClassBuilder != null) { - throw new IllegalArgumentException( - "inconsistent only_fqn parameters found for painless type [" + canonicalClassName + "]"); + throw new IllegalArgumentException("inconsistent only_fqn parameters found for class [" + canonicalClassName + "]"); } canonicalClassNamesToClasses.put(importedCanonicalClassName, clazz); } - } else if (importedPainlessType.equals(clazz) == false) { - throw new IllegalArgumentException("painless type [" + 
importedCanonicalClassName + "] illegally represents multiple " + - "java types [" + clazz.getCanonicalName() + "] and [" + importedPainlessType.getCanonicalName() + "]"); + } else if (importedPainlessClass.equals(clazz) == false) { + throw new IllegalArgumentException("imported class [" + importedCanonicalClassName + "] cannot represent multiple " + + "classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedPainlessClass) + "]"); } else if (importClassName == false) { - throw new IllegalArgumentException("inconsistent only_fqn parameters found for painless type [" + canonicalClassName + "]"); + throw new IllegalArgumentException("inconsistent only_fqn parameters found for class [" + canonicalClassName + "]"); } } } - private void addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) { - PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName)); + public void addPainlessConstructor(String targetCanonicalClassName, List typeNameParameters) { + Objects.requireNonNull(targetCanonicalClassName); + Objects.requireNonNull(typeNameParameters); - if (ownerStruct == null) { - throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " + - "parameters " + whitelistConstructor.painlessParameterTypeNames); - } + Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); - List> painlessParametersTypes = new ArrayList<>(whitelistConstructor.painlessParameterTypeNames.size()); - Class[] javaClassParameters = new Class[whitelistConstructor.painlessParameterTypeNames.size()]; + if (targetClass == null) { + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found" + + "for constructor [[" + targetCanonicalClassName + "], " + typeNameParameters + "]"); + } - for (int parameterCount = 0; parameterCount < whitelistConstructor.painlessParameterTypeNames.size(); ++parameterCount) { - String painlessParameterTypeName = whitelistConstructor.painlessParameterTypeNames.get(parameterCount); + List> typeParameters = new ArrayList<>(typeNameParameters.size()); + for (String typeNameParameter : typeNameParameters) { try { - Class painlessParameterClass = canonicalTypeNameToType(painlessParameterTypeName); + Class typeParameter = canonicalTypeNameToType(typeNameParameter); + typeParameters.add(typeParameter); + } catch (IllegalArgumentException iae) { + throw new IllegalArgumentException("type parameter [" + typeNameParameter + "] not found " + + "for constructor [[" + targetCanonicalClassName + "], " + typeNameParameters + "]", iae); + } + } + + addPainlessConstructor(targetClass, typeParameters); + } + + public void addPainlessConstructor(Class targetClass, List> typeParameters) { + Objects.requireNonNull(targetClass); + Objects.requireNonNull(typeParameters); + + if (targetClass == def.class) { + throw new IllegalArgumentException("cannot add constructor to reserved class [" + DEF_CLASS_NAME + "]"); + } + + String targetCanonicalClassName = targetClass.getCanonicalName(); + PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); + + if (painlessClassBuilder == null) { + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found" + + "for constructor [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + } + + int typeParametersSize = typeParameters.size(); + List> javaTypeParameters = new 
ArrayList<>(typeParametersSize); - painlessParametersTypes.add(painlessParameterClass); - javaClassParameters[parameterCount] = PainlessLookupUtility.typeToJavaType(painlessParameterClass); + for (Class typeParameter : typeParameters) { + try { + validateType(typeParameter); } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("struct not defined for constructor parameter [" + painlessParameterTypeName + "] " + - "with owner struct [" + ownerStructName + "] and constructor parameters " + - whitelistConstructor.painlessParameterTypeNames, iae); + throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " + + "for constructor [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae); } + + javaTypeParameters.add(typeToJavaType(typeParameter)); } - java.lang.reflect.Constructor javaConstructor; + Constructor javaConstructor; try { - javaConstructor = ownerStruct.clazz.getConstructor(javaClassParameters); - } catch (NoSuchMethodException exception) { - throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " + - " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames, exception); + javaConstructor = targetClass.getConstructor(javaTypeParameters.toArray(new Class[typeParametersSize])); + } catch (NoSuchMethodException nsme) { + throw new IllegalArgumentException("constructor reflection object " + + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme); } - String painlessMethodKey = buildPainlessMethodKey("", whitelistConstructor.painlessParameterTypeNames.size()); - PainlessMethod painlessConstructor = ownerStruct.constructors.get(painlessMethodKey); + String painlessMethodKey = buildPainlessMethodKey(CONSTRUCTOR_NAME, typeParametersSize); + PainlessMethod painlessConstructor = painlessClassBuilder.constructors.get(painlessMethodKey); if (painlessConstructor == null) { org.objectweb.asm.commons.Method asmConstructor = org.objectweb.asm.commons.Method.getMethod(javaConstructor); - MethodHandle javaHandle; + MethodHandle methodHandle; try { - javaHandle = MethodHandles.publicLookup().in(ownerStruct.clazz).unreflectConstructor(javaConstructor); - } catch (IllegalAccessException exception) { - throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " + - " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames); + methodHandle = MethodHandles.publicLookup().in(targetClass).unreflectConstructor(javaConstructor); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("constructor method handle " + + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); } painlessConstructor = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(ownerStruct.clazz, "", painlessParametersTypes), - key -> new PainlessMethod("", ownerStruct.clazz, null, void.class, painlessParametersTypes, - asmConstructor, javaConstructor.getModifiers(), javaHandle)); - ownerStruct.constructors.put(painlessMethodKey, painlessConstructor); - } else if (painlessConstructor.arguments.equals(painlessParametersTypes) == false){ - throw new IllegalArgumentException( - "illegal duplicate constructors [" + painlessMethodKey + "] found within the struct [" + ownerStruct.name + "] " + - "with parameters " + painlessParametersTypes + " and " + 
painlessConstructor.arguments); + new PainlessMethodCacheKey(targetClass, CONSTRUCTOR_NAME, typeParameters), + key -> new PainlessMethod(CONSTRUCTOR_NAME, targetClass, null, void.class, typeParameters, + asmConstructor, javaConstructor.getModifiers(), methodHandle) + ); + + painlessClassBuilder.constructors.put(painlessMethodKey, painlessConstructor); + } else if (painlessConstructor.arguments.equals(typeParameters) == false){ + throw new IllegalArgumentException("cannot have constructors " + + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " + + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(painlessConstructor.arguments) + "] " + + "with the same arity and different type parameters"); } } - private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) { - PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName)); + public void addPainlessMethod(ClassLoader classLoader, String targetCanonicalClassName, String augmentedCanonicalClassName, + String methodName, String returnCanonicalTypeName, List typeNameParameters) { - if (ownerStruct == null) { - throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + - "name [" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames); - } + Objects.requireNonNull(classLoader); + Objects.requireNonNull(targetCanonicalClassName); + Objects.requireNonNull(methodName); + Objects.requireNonNull(returnCanonicalTypeName); + Objects.requireNonNull(typeNameParameters); + + Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); - if (METHOD_NAME_PATTERN.matcher(whitelistMethod.javaMethodName).matches() == false) { - throw new IllegalArgumentException("invalid method name" + - " [" + whitelistMethod.javaMethodName + "] for owner struct [" + ownerStructName + "]."); + if (targetClass == null) { + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]"); } - Class javaAugmentedClass; + Class augmentedClass = null; - if (whitelistMethod.javaAugmentedClassName != null) { + if (augmentedCanonicalClassName != null) { try { - javaAugmentedClass = Class.forName(whitelistMethod.javaAugmentedClassName, true, whitelistClassLoader); + augmentedClass = Class.forName(augmentedCanonicalClassName, true, classLoader); } catch (ClassNotFoundException cnfe) { - throw new IllegalArgumentException("augmented class [" + whitelistMethod.javaAugmentedClassName + "] " + - "not found for method with name [" + whitelistMethod.javaMethodName + "] " + - "and parameters " + whitelistMethod.painlessParameterTypeNames, cnfe); + throw new IllegalArgumentException("augmented class [" + augmentedCanonicalClassName + "] not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", cnfe); } - } else { - javaAugmentedClass = null; } - int augmentedOffset = javaAugmentedClass == null ? 
0 : 1; + List> typeParameters = new ArrayList<>(typeNameParameters.size()); + + for (String typeNameParameter : typeNameParameters) { + try { + Class typeParameter = canonicalTypeNameToType(typeNameParameter); + typeParameters.add(typeParameter); + } catch (IllegalArgumentException iae) { + throw new IllegalArgumentException("parameter type [" + typeNameParameter + "] not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", iae); + } + } - List> painlessParametersTypes = new ArrayList<>(whitelistMethod.painlessParameterTypeNames.size()); - Class[] javaClassParameters = new Class[whitelistMethod.painlessParameterTypeNames.size() + augmentedOffset]; + Class returnType; - if (javaAugmentedClass != null) { - javaClassParameters[0] = ownerStruct.clazz; + try { + returnType = canonicalTypeNameToType(returnCanonicalTypeName); + } catch (IllegalArgumentException iae) { + throw new IllegalArgumentException("parameter type [" + returnCanonicalTypeName + "] not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", iae); } - for (int parameterCount = 0; parameterCount < whitelistMethod.painlessParameterTypeNames.size(); ++parameterCount) { - String painlessParameterTypeName = whitelistMethod.painlessParameterTypeNames.get(parameterCount); + addPainlessMethod(targetClass, augmentedClass, methodName, returnType, typeParameters); + } - try { - Class painlessParameterClass = canonicalTypeNameToType(painlessParameterTypeName); + public void addPainlessMethod(Class targetClass, Class augmentedClass, String methodName, + Class returnType, List> typeParameters) { + Objects.requireNonNull(targetClass); + Objects.requireNonNull(methodName); + Objects.requireNonNull(returnType); + Objects.requireNonNull(typeParameters); - painlessParametersTypes.add(painlessParameterClass); - javaClassParameters[parameterCount + augmentedOffset] = - PainlessLookupUtility.typeToJavaType(painlessParameterClass); - } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("struct not defined for method parameter [" + painlessParameterTypeName + "] " + - "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " + - "and parameters " + whitelistMethod.painlessParameterTypeNames, iae); - } + if (targetClass == def.class) { + throw new IllegalArgumentException("cannot add method to reserved class [" + DEF_CLASS_NAME + "]"); } - Class javaImplClass = javaAugmentedClass == null ? 
ownerStruct.clazz : javaAugmentedClass; - java.lang.reflect.Method javaMethod; + String targetCanonicalClassName = typeToCanonicalTypeName(targetClass); - try { - javaMethod = javaImplClass.getMethod(whitelistMethod.javaMethodName, javaClassParameters); - } catch (NoSuchMethodException nsme) { - throw new IllegalArgumentException("method with name [" + whitelistMethod.javaMethodName + "] " + - "and parameters " + whitelistMethod.painlessParameterTypeNames + " not found for class [" + - javaImplClass.getName() + "]", nsme); + if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) { + throw new IllegalArgumentException( + "invalid method name [" + methodName + "] for target class [" + targetCanonicalClassName + "]."); } - Class painlessReturnClass; + PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); + + if (painlessClassBuilder == null) { + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + } + + int typeParametersSize = typeParameters.size(); + int augmentedParameterOffset = augmentedClass == null ? 0 : 1; + List> javaTypeParameters = new ArrayList<>(typeParametersSize + augmentedParameterOffset); + + if (augmentedClass != null) { + javaTypeParameters.add(targetClass); + } + + for (Class typeParameter : typeParameters) { + try { + validateType(typeParameter); + } catch (IllegalArgumentException iae) { + throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " + + "not found for method [[" + targetCanonicalClassName + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "]", iae); + } + + javaTypeParameters.add(typeToJavaType(typeParameter)); + } try { - painlessReturnClass = canonicalTypeNameToType(whitelistMethod.painlessReturnTypeName); + validateType(returnType); } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("struct not defined for return type [" + whitelistMethod.painlessReturnTypeName + "] " + - "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " + - "and parameters " + whitelistMethod.painlessParameterTypeNames, iae); + throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(returnType) + "] not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae); } - if (javaMethod.getReturnType() != PainlessLookupUtility.typeToJavaType(painlessReturnClass)) { - throw new IllegalArgumentException("specified return type class [" + painlessReturnClass + "] " + - "does not match the return type class [" + javaMethod.getReturnType() + "] for the " + - "method with name [" + whitelistMethod.javaMethodName + "] " + - "and parameters " + whitelistMethod.painlessParameterTypeNames); + Method javaMethod; + + if (augmentedClass == null) { + try { + javaMethod = targetClass.getMethod(methodName, javaTypeParameters.toArray(new Class[typeParametersSize])); + } catch (NoSuchMethodException nsme) { + throw new IllegalArgumentException("method reflection object [[" + targetCanonicalClassName + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme); + } + } else { + try { + javaMethod = augmentedClass.getMethod(methodName, javaTypeParameters.toArray(new Class[typeParametersSize])); + } 
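An augmented method, as handled above, is a public static method declared on a separate class whose first parameter is the target class, which is why the builder prepends the target class to the reflected parameter list. Below is a minimal, self-contained sketch of that resolution pattern; the `StringAugmenter` class and its `repeat` method are hypothetical stand-ins, not part of any whitelist.

```java
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Method;

public class AugmentedLookupSketch {
    /** Hypothetical augmenting class: adds a "repeat" method to String. */
    public static class StringAugmenter {
        public static String repeat(String receiver, int count) {
            StringBuilder result = new StringBuilder();
            for (int i = 0; i < count; ++i) {
                result.append(receiver);
            }
            return result.toString();
        }
    }

    public static void main(String[] args) throws Throwable {
        Class<?> targetClass = String.class;
        Class<?> augmentedClass = StringAugmenter.class;
        // As in addPainlessMethod, the target class is prepended to the
        // declared type parameters before the reflection lookup.
        Method javaMethod = augmentedClass.getMethod("repeat", targetClass, int.class);
        MethodHandle handle = MethodHandles.publicLookup().in(augmentedClass).unreflect(javaMethod);
        System.out.println(handle.invoke("ab", 3)); // prints "ababab"
    }
}
```

The same `publicLookup`-based unreflection is what `addPainlessConstructor` above does for constructors via `unreflectConstructor`.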
catch (NoSuchMethodException nsme) { + throw new IllegalArgumentException("method reflection object [[" + targetCanonicalClassName + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " + + "with augmented target class [" + typeToCanonicalTypeName(augmentedClass) + "]", nsme); + } } - String painlessMethodKey = - buildPainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size()); + if (javaMethod.getReturnType() != typeToJavaType(returnType)) { + throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(javaMethod.getReturnType()) + "] " + + "does not match the specified returned type [" + typeToCanonicalTypeName(returnType) + "] " + + "for method [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "]"); + } + + String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize); - if (javaAugmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) { - PainlessMethod painlessMethod = ownerStruct.staticMethods.get(painlessMethodKey); + if (augmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) { + PainlessMethod painlessMethod = painlessClassBuilder.staticMethods.get(painlessMethodKey); if (painlessMethod == null) { org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod); MethodHandle javaMethodHandle; try { - javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod); - } catch (IllegalAccessException exception) { - throw new IllegalArgumentException("method handle not found for method with name " + - "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames); + javaMethodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("static method handle [[" + targetClass.getCanonicalName() + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); } painlessMethod = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(ownerStruct.clazz, whitelistMethod.javaMethodName, painlessParametersTypes), - key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct.clazz, null, painlessReturnClass, - painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); - ownerStruct.staticMethods.put(painlessMethodKey, painlessMethod); - } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn == painlessReturnClass && - painlessMethod.arguments.equals(painlessParametersTypes)) == false) { - throw new IllegalArgumentException("illegal duplicate static methods [" + painlessMethodKey + "] " + - "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " + - "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " + - "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments); + new PainlessMethodCacheKey(targetClass, methodName, typeParameters), + key -> new PainlessMethod(methodName, targetClass, null, returnType, + typeParameters, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); + + painlessClassBuilder.staticMethods.put(painlessMethodKey, painlessMethod); + } else if ((painlessMethod.name.equals(methodName) && painlessMethod.rtn == returnType && + 
painlessMethod.arguments.equals(typeParameters)) == false) { + throw new IllegalArgumentException("cannot have static methods " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(returnType) + "], " + + typesToCanonicalTypeNames(typeParameters) + "] and " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(painlessMethod.rtn) + "], " + + typesToCanonicalTypeNames(painlessMethod.arguments) + "] " + + "with the same arity and different return type or type parameters"); } } else { - PainlessMethod painlessMethod = ownerStruct.methods.get(painlessMethodKey); + PainlessMethod painlessMethod = painlessClassBuilder.methods.get(painlessMethodKey); if (painlessMethod == null) { org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod); MethodHandle javaMethodHandle; - try { - javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod); - } catch (IllegalAccessException exception) { - throw new IllegalArgumentException("method handle not found for method with name " + - "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames); + if (augmentedClass == null) { + try { + javaMethodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("method handle [[" + targetClass.getCanonicalName() + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); + } + } else { + try { + javaMethodHandle = MethodHandles.publicLookup().in(augmentedClass).unreflect(javaMethod); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("method handle [[" + targetClass.getCanonicalName() + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " + + "with augmented target class [" + typeToCanonicalTypeName(augmentedClass) + "]", iae); + } } painlessMethod = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(ownerStruct.clazz, whitelistMethod.javaMethodName, painlessParametersTypes), - key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct.clazz, javaAugmentedClass, painlessReturnClass, - painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); - ownerStruct.methods.put(painlessMethodKey, painlessMethod); - } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn.equals(painlessReturnClass) && - painlessMethod.arguments.equals(painlessParametersTypes)) == false) { - throw new IllegalArgumentException("illegal duplicate member methods [" + painlessMethodKey + "] " + - "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " + - "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " + - "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments); + new PainlessMethodCacheKey(targetClass, methodName, typeParameters), + key -> new PainlessMethod(methodName, targetClass, augmentedClass, returnType, + typeParameters, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); + + painlessClassBuilder.methods.put(painlessMethodKey, painlessMethod); + } else if ((painlessMethod.name.equals(methodName) && painlessMethod.rtn == returnType && + painlessMethod.arguments.equals(typeParameters)) == false) { + throw new
IllegalArgumentException("cannot have methods " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(returnType) + "], " + + typesToCanonicalTypeNames(typeParameters) + "] and " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(painlessMethod.rtn) + "], " + + typesToCanonicalTypeNames(painlessMethod.arguments) + "] " + + "with the same arity and different return type or type parameters"); } } } - private void addField(String ownerStructName, WhitelistField whitelistField) { - PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName)); + public void addPainlessField(String targetCanonicalClassName, String fieldName, String typeNameParameter) { + Objects.requireNonNull(targetCanonicalClassName); + Objects.requireNonNull(fieldName); + Objects.requireNonNull(typeNameParameter); - if (ownerStruct == null) { - throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + - "name [" + whitelistField.javaFieldName + "] and type " + whitelistField.painlessFieldTypeName); - } + Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); - if (FIELD_NAME_PATTERN.matcher(whitelistField.javaFieldName).matches() == false) { - throw new IllegalArgumentException("invalid field name " + - "[" + whitelistField.painlessFieldTypeName + "] for owner struct [" + ownerStructName + "]."); + if (targetClass == null) { + throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found"); } - java.lang.reflect.Field javaField; + Class typeParameter; try { - javaField = ownerStruct.clazz.getField(whitelistField.javaFieldName); - } catch (NoSuchFieldException exception) { - throw new IllegalArgumentException("field [" + whitelistField.javaFieldName + "] " + - "not found for class [" + ownerStruct.clazz.getName() + "]."); + typeParameter = canonicalTypeNameToType(typeNameParameter); + } catch (IllegalArgumentException iae) { + throw new IllegalArgumentException("type parameter [" + typeNameParameter + "] not found " + + "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]"); + } + + + addPainlessField(targetClass, fieldName, typeParameter); + } + + public void addPainlessField(Class targetClass, String fieldName, Class typeParameter) { + Objects.requireNonNull(targetClass); + Objects.requireNonNull(fieldName); + Objects.requireNonNull(typeParameter); + + if (targetClass == def.class) { + throw new IllegalArgumentException("cannot add field to reserved class [" + DEF_CLASS_NAME + "]"); + } + + String targetCanonicalClassName = typeToCanonicalTypeName(targetClass); + + if (FIELD_NAME_PATTERN.matcher(fieldName).matches() == false) { + throw new IllegalArgumentException( + "invalid field name [" + fieldName + "] for target class [" + targetCanonicalClassName + "]."); } - Class painlessFieldClass; + + PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); + + if (painlessClassBuilder == null) { + throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found"); + } try { - painlessFieldClass = canonicalTypeNameToType(whitelistField.painlessFieldTypeName); + validateType(typeParameter); } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("struct not defined for return type [" + whitelistField.painlessFieldTypeName + "] " + - "with owner struct [" + ownerStructName + "] and field with name 
[" + whitelistField.javaFieldName + "]", iae); + throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " + + "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]", iae); + } + + Field javaField; + + try { + javaField = targetClass.getField(fieldName); + } catch (NoSuchFieldException nsme) { + throw new IllegalArgumentException( + "field reflection object [[" + targetCanonicalClassName + "], [" + fieldName + "] not found", nsme); } + if (javaField.getType() != typeToJavaType(typeParameter)) { + throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(javaField.getType()) + "] " + + "does not match the specified type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " + + "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]"); + } + + String painlessFieldKey = buildPainlessFieldKey(fieldName); + if (Modifier.isStatic(javaField.getModifiers())) { if (Modifier.isFinal(javaField.getModifiers()) == false) { - throw new IllegalArgumentException("static [" + whitelistField.javaFieldName + "] " + - "with owner struct [" + ownerStruct.name + "] is not final"); + throw new IllegalArgumentException("static field [[" + targetCanonicalClassName + "]. [" + fieldName + "]] must be final"); } - PainlessField painlessField = ownerStruct.staticMembers.get(whitelistField.javaFieldName); + PainlessField painlessField = painlessClassBuilder.staticMembers.get(painlessFieldKey); if (painlessField == null) { painlessField = painlessFieldCache.computeIfAbsent( - new PainlessFieldCacheKey(ownerStruct.clazz, whitelistField.javaFieldName, painlessFieldClass), - key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(), - ownerStruct.clazz, painlessFieldClass, javaField.getModifiers(), null, null)); - ownerStruct.staticMembers.put(whitelistField.javaFieldName, painlessField); - } else if (painlessField.clazz != painlessFieldClass) { - throw new IllegalArgumentException("illegal duplicate static fields [" + whitelistField.javaFieldName + "] " + - "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]"); + new PainlessFieldCacheKey(targetClass, fieldName, typeParameter), + key -> new PainlessField(fieldName, javaField.getName(), targetClass, + typeParameter, javaField.getModifiers(), null, null)); + + painlessClassBuilder.staticMembers.put(painlessFieldKey, painlessField); + } else if (painlessField.clazz != typeParameter) { + throw new IllegalArgumentException("cannot have static fields " + + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + + typeToCanonicalTypeName(typeParameter) + "] and " + + "[[" + targetCanonicalClassName + "], [" + painlessField.name + "], " + + typeToCanonicalTypeName(painlessField.clazz) + "] " + + "with the same and different type parameters"); } } else { - MethodHandle javaMethodHandleGetter; - MethodHandle javaMethodHandleSetter; + MethodHandle methodHandleGetter; try { - if (Modifier.isStatic(javaField.getModifiers()) == false) { - javaMethodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField); - javaMethodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField); - } else { - javaMethodHandleGetter = null; - javaMethodHandleSetter = null; - } - } catch (IllegalAccessException exception) { - throw new IllegalArgumentException("getter/setter [" + whitelistField.javaFieldName + "]" + - " not found for class [" + ownerStruct.clazz.getName() + "]."); + 
methodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException( + "method handle getter not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); } - PainlessField painlessField = ownerStruct.members.get(whitelistField.javaFieldName); + MethodHandle methodHandleSetter; - if (painlessField == null) { - painlessField = painlessFieldCache.computeIfAbsent( - new PainlessFieldCacheKey(ownerStruct.clazz, whitelistField.javaFieldName, painlessFieldClass), - key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(), - ownerStruct.clazz, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter)); - ownerStruct.members.put(whitelistField.javaFieldName, painlessField); - } else if (painlessField.clazz != painlessFieldClass) { - throw new IllegalArgumentException("illegal duplicate member fields [" + whitelistField.javaFieldName + "] " + - "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]"); - } - } - } - - private void copyStruct(String struct, List children) { - final PainlessClassBuilder owner = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(struct)); - - if (owner == null) { - throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy."); - } - - for (int count = 0; count < children.size(); ++count) { - final PainlessClassBuilder child = - classesToPainlessClasses.get(canonicalClassNamesToClasses.get(children.get(count))); - - if (child == null) { - throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" + - " not defined for copy to owner struct [" + owner.name + "]."); - } - - if (!child.clazz.isAssignableFrom(owner.clazz)) { - throw new ClassCastException("Child struct [" + child.name + "]" + - " is not a super type of owner struct [" + owner.name + "] in copy."); - } - - for (Map.Entry kvPair : child.methods.entrySet()) { - String methodKey = kvPair.getKey(); - PainlessMethod method = kvPair.getValue(); - if (owner.methods.get(methodKey) == null) { - // TODO: some of these are no longer valid or outright don't work - // TODO: since classes may not come from the Painless classloader - // TODO: and it was dependent on the order of the extends which - // TODO: which no longer exists since this is generated automatically - // sanity check, look for missing covariant/generic override - /*if (owner.clazz.isInterface() && child.clazz == Object.class) { - // ok - } else if (child.clazz == Spliterator.OfPrimitive.class || child.clazz == PrimitiveIterator.class) { - // ok, we rely on generics erasure for these (its guaranteed in the javadocs though!!!!) - } else if (Constants.JRE_IS_MINIMUM_JAVA9 && owner.clazz == LocalDate.class) { - // ok, java 9 added covariant override for LocalDate.getEra() to return IsoEra: - // https://bugs.openjdk.java.net/browse/JDK-8072746 - } else { - try { - // TODO: we *have* to remove all these public members and use getter methods to encapsulate! 
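For reference, the getter and setter handles that addPainlessField caches for instance fields can be exercised in isolation. The sketch below uses a hypothetical `FieldHandleSketch` class with a single public field; it is an illustration of the `unreflectGetter`/`unreflectSetter` pattern, not the builder's code.

```java
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Field;

public class FieldHandleSketch {
    public int counter = 0; // hypothetical public field

    public static void main(String[] args) throws Throwable {
        Field javaField = FieldHandleSketch.class.getField("counter");
        // The same handles PainlessField stores as getter/setter.
        MethodHandle getter = MethodHandles.publicLookup().unreflectGetter(javaField);
        MethodHandle setter = MethodHandles.publicLookup().unreflectSetter(javaField);

        FieldHandleSketch target = new FieldHandleSketch();
        setter.invoke(target, 42);                  // equivalent to target.counter = 42
        System.out.println(getter.invoke(target)); // prints 42
    }
}
```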
- final Class impl; - final Class arguments[]; - if (method.augmentation != null) { - impl = method.augmentation; - arguments = new Class[method.arguments.size() + 1]; - arguments[0] = method.owner.clazz; - for (int i = 0; i < method.arguments.size(); i++) { - arguments[i + 1] = method.arguments.get(i).clazz; - } - } else { - impl = owner.clazz; - arguments = new Class[method.arguments.size()]; - for (int i = 0; i < method.arguments.size(); i++) { - arguments[i] = method.arguments.get(i).clazz; - } - } - java.lang.reflect.Method m = impl.getMethod(method.method.getName(), arguments); - if (m.getReturnType() != method.rtn.clazz) { - throw new IllegalStateException("missing covariant override for: " + m + " in " + owner.name); - } - if (m.isBridge() && !Modifier.isVolatile(method.modifiers)) { - // its a bridge in the destination, but not in the source, but it might still be ok, check generics: - java.lang.reflect.Method source = child.clazz.getMethod(method.method.getName(), arguments); - if (!Arrays.equals(source.getGenericParameterTypes(), source.getParameterTypes())) { - throw new IllegalStateException("missing generic override for: " + m + " in " + owner.name); - } - } - } catch (ReflectiveOperationException e) { - throw new AssertionError(e); - } - }*/ - owner.methods.put(methodKey, method); - } + try { + methodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException( + "method handle setter not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); } - for (PainlessField field : child.members.values()) { - if (owner.members.get(field.name) == null) { - owner.members.put(field.name, new PainlessField( - field.name, field.javaName, owner.clazz, field.clazz, field.modifiers, field.getter, field.setter)); - } - } - } - } + PainlessField painlessField = painlessClassBuilder.members.get(painlessFieldKey); - /** - * Precomputes a more efficient structure for dynamic method/field access. 
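The removed copyStruct is superseded by copyPainlessClassMembers further below, which walks superclasses and interfaces directly instead of precomputing a list of ancestor names. A rough sketch of such a traversal, assuming only the set of ancestor classes is needed (class and method names here are illustrative):

```java
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.LinkedHashSet;
import java.util.Set;

public class HierarchyWalkSketch {
    /** Collects every superclass and every (transitively) implemented interface. */
    public static Set<Class<?>> collectAncestors(Class<?> clazz) {
        Set<Class<?>> ancestors = new LinkedHashSet<>();
        Deque<Class<?>> queue = new ArrayDeque<>();
        queue.push(clazz);
        while (queue.isEmpty() == false) {
            Class<?> current = queue.pop();
            Class<?> superClass = current.getSuperclass();
            if (superClass != null && ancestors.add(superClass)) {
                queue.push(superClass);
            }
            for (Class<?> iface : current.getInterfaces()) {
                if (ancestors.add(iface)) {
                    queue.push(iface);
                }
            }
        }
        return ancestors;
    }

    public static void main(String[] args) {
        // e.g. AbstractList, List, Collection, Iterable, ...
        System.out.println(collectAncestors(java.util.ArrayList.class));
    }
}
```

In the builder itself, only ancestors that are present in classesToPainlessClassBuilders contribute members, and the copy keeps the member whose origin class is most specific.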
- */ - private void addRuntimeClass(final PainlessClassBuilder struct) { - // add all getters/setters - for (Map.Entry method : struct.methods.entrySet()) { - String name = method.getValue().name; - PainlessMethod m = method.getValue(); - - if (m.arguments.size() == 0 && - name.startsWith("get") && - name.length() > 3 && - Character.isUpperCase(name.charAt(3))) { - StringBuilder newName = new StringBuilder(); - newName.append(Character.toLowerCase(name.charAt(3))); - newName.append(name.substring(4)); - struct.getters.putIfAbsent(newName.toString(), m.handle); - } else if (m.arguments.size() == 0 && - name.startsWith("is") && - name.length() > 2 && - Character.isUpperCase(name.charAt(2))) { - StringBuilder newName = new StringBuilder(); - newName.append(Character.toLowerCase(name.charAt(2))); - newName.append(name.substring(3)); - struct.getters.putIfAbsent(newName.toString(), m.handle); - } - - if (m.arguments.size() == 1 && - name.startsWith("set") && - name.length() > 3 && - Character.isUpperCase(name.charAt(3))) { - StringBuilder newName = new StringBuilder(); - newName.append(Character.toLowerCase(name.charAt(3))); - newName.append(name.substring(4)); - struct.setters.putIfAbsent(newName.toString(), m.handle); - } - } - - // add all members - for (Map.Entry member : struct.members.entrySet()) { - struct.getters.put(member.getKey(), member.getValue().getter); - struct.setters.put(member.getKey(), member.getValue().setter); - } - } + if (painlessField == null) { + painlessField = painlessFieldCache.computeIfAbsent( + new PainlessFieldCacheKey(targetClass, painlessFieldKey, typeParameter), + key -> new PainlessField(fieldName, javaField.getName(), targetClass, + typeParameter, javaField.getModifiers(), methodHandleGetter, methodHandleSetter)); - /** computes the functional interface method for a class, or returns null */ - private PainlessMethod computeFunctionalInterfaceMethod(PainlessClassBuilder clazz) { - if (!clazz.clazz.isInterface()) { - return null; - } - // if its marked with this annotation, we fail if the conditions don't hold (means whitelist bug) - // otherwise, this annotation is pretty useless. - boolean hasAnnotation = clazz.clazz.isAnnotationPresent(FunctionalInterface.class); - List methods = new ArrayList<>(); - for (java.lang.reflect.Method m : clazz.clazz.getMethods()) { - // default interface methods don't count - if (m.isDefault()) { - continue; - } - // static methods don't count - if (Modifier.isStatic(m.getModifiers())) { - continue; - } - // if its from Object, it doesn't count - try { - Object.class.getMethod(m.getName(), m.getParameterTypes()); - continue; - } catch (ReflectiveOperationException e) { - // it counts + painlessClassBuilder.members.put(fieldName, painlessField); + } else if (painlessField.clazz != typeParameter) { + throw new IllegalArgumentException("cannot have fields " + + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + + typeToCanonicalTypeName(typeParameter) + "] and " + + "[[" + targetCanonicalClassName + "], [" + painlessField.name + "], " + + typeToCanonicalTypeName(painlessField.clazz) + "] " + + "with the same and different type parameters"); } - methods.add(m); } - if (methods.size() != 1) { - if (hasAnnotation) { - throw new IllegalArgumentException("Class: " + clazz.name + - " is marked with FunctionalInterface but doesn't fit the bill: " + methods); - } - return null; - } - // inspect the one method found from the reflection API, it should match the whitelist! 
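The commented-out sanity checks above are dropped, but the single-abstract-method discovery survives in setFunctionalInterfaceMethod below: default methods, static methods, and methods also declared on Object do not count. A standalone sketch of that rule (`findFunctionalMethod` is a hypothetical helper, not the builder's API):

```java
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;

public class FunctionalMethodSketch {
    /** Returns the interface's single abstract method, or null if there isn't exactly one. */
    public static Method findFunctionalMethod(Class<?> iface) {
        List<Method> candidates = new ArrayList<>();
        for (Method method : iface.getMethods()) {
            if (method.isDefault() || Modifier.isStatic(method.getModifiers())) {
                continue;
            }
            try {
                // Methods also declared on Object (equals, hashCode, ...) do not count.
                Object.class.getMethod(method.getName(), method.getParameterTypes());
            } catch (ReflectiveOperationException roe) {
                candidates.add(method);
            }
        }
        return candidates.size() == 1 ? candidates.get(0) : null;
    }

    public static void main(String[] args) {
        System.out.println(findFunctionalMethod(Runnable.class));             // run()
        System.out.println(findFunctionalMethod(java.util.Comparator.class)); // compare(..)
    }
}
```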
- java.lang.reflect.Method oneMethod = methods.get(0); - PainlessMethod painless = clazz.methods.get(buildPainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount())); - if (painless == null || painless.method.equals(org.objectweb.asm.commons.Method.getMethod(oneMethod)) == false) { - throw new IllegalArgumentException("Class: " + clazz.name + " is functional but the functional " + - "method is not whitelisted!"); - } - return painless; } public PainlessLookup build() { @@ -690,19 +675,19 @@ public PainlessLookup build() { for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) { String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); PainlessClassBuilder painlessStruct = - classesToPainlessClasses.get(canonicalClassNamesToClasses.get(painlessTypeName)); + classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(painlessTypeName)); if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) { throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " + - "[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]"); + "[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]"); } origin = whitelistStruct.origin; addPainlessClass( whitelist.javaClassLoader, whitelistStruct.javaClassName, whitelistStruct.onlyFQNJavaClassName == false); - painlessStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(painlessTypeName)); - classesToPainlessClasses.put(painlessStruct.clazz, painlessStruct); + painlessStruct = classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(painlessTypeName)); + classesToPainlessClassBuilders.put(painlessStruct.clazz, painlessStruct); } } @@ -715,17 +700,19 @@ public PainlessLookup build() { for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) { origin = whitelistConstructor.origin; - addConstructor(painlessTypeName, whitelistConstructor); + addPainlessConstructor(painlessTypeName, whitelistConstructor.painlessParameterTypeNames); } for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) { origin = whitelistMethod.origin; - addMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod); + addPainlessMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod.javaAugmentedClassName, + whitelistMethod.javaMethodName, whitelistMethod.painlessReturnTypeName, + whitelistMethod.painlessParameterTypeNames); } for (WhitelistField whitelistField : whitelistStruct.whitelistFields) { origin = whitelistField.origin; - addField(painlessTypeName, whitelistField); + addPainlessField(painlessTypeName, whitelistField.javaFieldName, whitelistField.painlessFieldTypeName); } } } @@ -733,78 +720,144 @@ public PainlessLookup build() { throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception); } - // goes through each Painless struct and determines the inheritance list, - // and then adds all inherited types to the Painless struct's whitelist - for (Class javaClass : classesToPainlessClasses.keySet()) { - PainlessClassBuilder painlessStruct = classesToPainlessClasses.get(javaClass); + copyPainlessClassMembers(); + cacheRuntimeHandles(); + setFunctionalInterfaceMethods(); + + Map, PainlessClass> classesToPainlessClasses = new HashMap<>(classesToPainlessClassBuilders.size()); - List painlessSuperStructs = new ArrayList<>(); - Class javaSuperClass = 
painlessStruct.clazz.getSuperclass(); + for (Map.Entry, PainlessClassBuilder> painlessClassBuilderEntry : classesToPainlessClassBuilders.entrySet()) { + classesToPainlessClasses.put(painlessClassBuilderEntry.getKey(), painlessClassBuilderEntry.getValue().build()); + } - Stack> javaInteraceLookups = new Stack<>(); - javaInteraceLookups.push(painlessStruct.clazz); + return new PainlessLookup(canonicalClassNamesToClasses, classesToPainlessClasses); + } - // adds super classes to the inheritance list - if (javaSuperClass != null && javaSuperClass.isInterface() == false) { - while (javaSuperClass != null) { - PainlessClassBuilder painlessSuperStruct = classesToPainlessClasses.get(javaSuperClass); + private void copyPainlessClassMembers() { + for (Class parentClass : classesToPainlessClassBuilders.keySet()) { + copyPainlessInterfaceMembers(parentClass, parentClass); - if (painlessSuperStruct != null) { - painlessSuperStructs.add(painlessSuperStruct.name); - } + Class childClass = parentClass.getSuperclass(); - javaInteraceLookups.push(javaSuperClass); - javaSuperClass = javaSuperClass.getSuperclass(); + while (childClass != null) { + if (classesToPainlessClassBuilders.containsKey(childClass)) { + copyPainlessClassMembers(childClass, parentClass); } + + copyPainlessInterfaceMembers(childClass, parentClass); + childClass = childClass.getSuperclass(); + } + } + + for (Class javaClass : classesToPainlessClassBuilders.keySet()) { + if (javaClass.isInterface()) { + copyPainlessClassMembers(Object.class, javaClass); + } + } + } + + private void copyPainlessInterfaceMembers(Class parentClass, Class targetClass) { + for (Class childClass : parentClass.getInterfaces()) { + if (classesToPainlessClassBuilders.containsKey(childClass)) { + copyPainlessClassMembers(childClass, targetClass); } - // adds all super interfaces to the inheritance list - while (javaInteraceLookups.isEmpty() == false) { - Class javaInterfaceLookup = javaInteraceLookups.pop(); + copyPainlessInterfaceMembers(childClass, targetClass); + } + } - for (Class javaSuperInterface : javaInterfaceLookup.getInterfaces()) { - PainlessClassBuilder painlessInterfaceStruct = classesToPainlessClasses.get(javaSuperInterface); + private void copyPainlessClassMembers(Class originalClass, Class targetClass) { + PainlessClassBuilder originalPainlessClassBuilder = classesToPainlessClassBuilders.get(originalClass); + PainlessClassBuilder targetPainlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); - if (painlessInterfaceStruct != null) { - String painlessInterfaceStructName = painlessInterfaceStruct.name; + Objects.requireNonNull(originalPainlessClassBuilder); + Objects.requireNonNull(targetPainlessClassBuilder); - if (painlessSuperStructs.contains(painlessInterfaceStructName) == false) { - painlessSuperStructs.add(painlessInterfaceStructName); - } + for (Map.Entry painlessMethodEntry : originalPainlessClassBuilder.methods.entrySet()) { + String painlessMethodKey = painlessMethodEntry.getKey(); + PainlessMethod newPainlessMethod = painlessMethodEntry.getValue(); + PainlessMethod existingPainlessMethod = targetPainlessClassBuilder.methods.get(painlessMethodKey); - for (Class javaPushInterface : javaInterfaceLookup.getInterfaces()) { - javaInteraceLookups.push(javaPushInterface); - } - } - } + if (existingPainlessMethod == null || existingPainlessMethod.target != newPainlessMethod.target && + existingPainlessMethod.target.isAssignableFrom(newPainlessMethod.target)) { + targetPainlessClassBuilder.methods.put(painlessMethodKey, 
newPainlessMethod); } + } - // copies methods and fields from super structs to the parent struct - copyStruct(painlessStruct.name, painlessSuperStructs); - - // copies methods and fields from Object into interface types - if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) { - PainlessClassBuilder painlessObjectStruct = classesToPainlessClasses.get(Object.class); + for (Map.Entry painlessFieldEntry : originalPainlessClassBuilder.members.entrySet()) { + String painlessFieldKey = painlessFieldEntry.getKey(); + PainlessField newPainlessField = painlessFieldEntry.getValue(); + PainlessField existingPainlessField = targetPainlessClassBuilder.members.get(painlessFieldKey); - if (painlessObjectStruct != null) { - copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name)); - } + if (existingPainlessField == null || existingPainlessField.target != newPainlessField.target && + existingPainlessField.target.isAssignableFrom(newPainlessField.target)) { + targetPainlessClassBuilder.members.put(painlessFieldKey, newPainlessField); } } + } - // precompute runtime classes - for (PainlessClassBuilder painlessStruct : classesToPainlessClasses.values()) { - addRuntimeClass(painlessStruct); + private void cacheRuntimeHandles() { + for (PainlessClassBuilder painlessClassBuilder : classesToPainlessClassBuilders.values()) { + cacheRuntimeHandles(painlessClassBuilder); } + } + + private void cacheRuntimeHandles(PainlessClassBuilder painlessClassBuilder) { + for (PainlessMethod painlessMethod : painlessClassBuilder.methods.values()) { + String methodName = painlessMethod.name; + int typeParametersSize = painlessMethod.arguments.size(); - Map, PainlessClass> javaClassesToPainlessClasses = new HashMap<>(); + if (typeParametersSize == 0 && methodName.startsWith("get") && methodName.length() > 3 && + Character.isUpperCase(methodName.charAt(3))) { + painlessClassBuilder.getters.putIfAbsent( + Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4), painlessMethod.handle); + } else if (typeParametersSize == 0 && methodName.startsWith("is") && methodName.length() > 2 && + Character.isUpperCase(methodName.charAt(2))) { + painlessClassBuilder.getters.putIfAbsent( + Character.toLowerCase(methodName.charAt(2)) + methodName.substring(3), painlessMethod.handle); + } else if (typeParametersSize == 1 && methodName.startsWith("set") && methodName.length() > 3 && + Character.isUpperCase(methodName.charAt(3))) { + painlessClassBuilder.setters.putIfAbsent( + Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4), painlessMethod.handle); + } + } - // copy all structs to make them unmodifiable for outside users: - for (Map.Entry,PainlessClassBuilder> entry : classesToPainlessClasses.entrySet()) { - entry.getValue().functionalMethod = computeFunctionalInterfaceMethod(entry.getValue()); - javaClassesToPainlessClasses.put(entry.getKey(), entry.getValue().build()); + for (PainlessField painlessField : painlessClassBuilder.members.values()) { + painlessClassBuilder.getters.put(painlessField.name, painlessField.getter); + painlessClassBuilder.setters.put(painlessField.name, painlessField.setter); } + } + + private void setFunctionalInterfaceMethods() { + for (Map.Entry, PainlessClassBuilder> painlessClassBuilderEntry : classesToPainlessClassBuilders.entrySet()) { + setFunctionalInterfaceMethod(painlessClassBuilderEntry.getValue()); + } + } - return new PainlessLookup(canonicalClassNamesToClasses, javaClassesToPainlessClasses); + private 
void setFunctionalInterfaceMethod(PainlessClassBuilder painlessClassBuilder) { + Class targetClass = painlessClassBuilder.clazz; + + if (targetClass.isInterface()) { + List javaMethods = new ArrayList<>(); + + for (java.lang.reflect.Method javaMethod : targetClass.getMethods()) { + if (javaMethod.isDefault() == false && Modifier.isStatic(javaMethod.getModifiers()) == false) { + try { + Object.class.getMethod(javaMethod.getName(), javaMethod.getParameterTypes()); + } catch (ReflectiveOperationException roe) { + javaMethods.add(javaMethod); + } + } + } + + if (javaMethods.size() != 1 && targetClass.isAnnotationPresent(FunctionalInterface.class)) { + throw new IllegalArgumentException("class [" + typeToCanonicalTypeName(targetClass) + "] " + + "is illegally marked as a FunctionalInterface with java methods " + javaMethods); + } else if (javaMethods.size() == 1) { + java.lang.reflect.Method javaMethod = javaMethods.get(0); + String painlessMethodKey = buildPainlessMethodKey(javaMethod.getName(), javaMethod.getParameterCount()); + painlessClassBuilder.functionalMethod = painlessClassBuilder.methods.get(painlessMethodKey); + } + } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java index 1f698b7c673f5..86d3f87663867 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java @@ -33,10 +33,12 @@ * * A class is a set of methods and fields under a specific class name. A type is either a class or an array under a specific type name. * Note the distinction between class versus type is class means that no array classes will be be represented whereas type allows array - * classes to be represented. The set of available classes will always be a subset of the available types. + * classes to be represented. The set of available classes will always be a subset of the available types. * * Under ambiguous circumstances most variable names are prefixed with asm, java, or painless. If the variable value is the same for asm, - * java, and painless, no prefix is used. + * java, and painless, no prefix is used. Target is used as a prefix to represent if a constructor, method, or field is being + * called/accessed on that specific class. Parameter is often a postfix used to represent if a type is used as a parameter to a + * constructor, method, or field. * *

 * <ul>
  • - javaClassName (String) - the fully qualified java class name where '$' tokens represent inner classes excluding @@ -150,8 +152,8 @@ public static String typeToCanonicalTypeName(Class type) { String canonicalTypeName = type.getCanonicalName(); - if (canonicalTypeName.startsWith(def.class.getName())) { - canonicalTypeName = canonicalTypeName.replace(def.class.getName(), DEF_TYPE_NAME); + if (canonicalTypeName.startsWith(def.class.getCanonicalName())) { + canonicalTypeName = canonicalTypeName.replace(def.class.getCanonicalName(), DEF_CLASS_NAME); } return canonicalTypeName; @@ -351,7 +353,7 @@ public static String buildPainlessFieldKey(String fieldName) { /** * The def type name as specified in the source for a script. */ - public static final String DEF_TYPE_NAME = "def"; + public static final String DEF_CLASS_NAME = "def"; /** * The method name for all constructors. diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/java.lang.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/java.lang.txt index a793ef847f9c7..ef2d462127f36 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/java.lang.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/java.lang.txt @@ -148,7 +148,7 @@ class java.lang.Character { int MAX_RADIX char MAX_SURROGATE char MAX_VALUE - char MIN_CODE_POINT + int MIN_CODE_POINT char MIN_HIGH_SURROGATE char MIN_LOW_SURROGATE int MIN_RADIX diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java index 5d881632deeee..d0d0b2165ca10 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java @@ -26,7 +26,7 @@ public class InitializerTests extends ScriptTestCase { - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"rawtypes"}) public void testArrayInitializers() { int[] ints = (int[])exec("new int[] {}"); @@ -59,7 +59,7 @@ public void testArrayInitializers() { assertEquals("aaaaaa", objects[3]); } - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"rawtypes"}) public void testListInitializers() { List list = (List)exec("[]"); @@ -91,7 +91,7 @@ public void testListInitializers() { assertEquals("aaaaaa", list.get(3)); } - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"rawtypes"}) public void testMapInitializers() { Map map = (Map)exec("[:]"); diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGain.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGain.java index cab3237732301..a6a6830a99c5f 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGain.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGain.java @@ -126,8 +126,6 @@ public Optional forcedSearchSize() { @Override public EvalQueryQuality evaluate(String taskId, SearchHit[] hits, List ratedDocs) { - List allRatings = ratedDocs.stream().mapToInt(RatedDocument::getRating).boxed() - .collect(Collectors.toList()); List ratedHits = joinHitsWithRatings(hits, ratedDocs); List ratingsInSearchHits = new ArrayList<>(ratedHits.size()); int unratedResults = 0; @@ -144,6 +142,8 @@ public EvalQueryQuality evaluate(String taskId, 
SearchHit[] hits, double idcg = 0; if (normalize) { + List allRatings = ratedDocs.stream().mapToInt(RatedDocument::getRating).boxed() + .collect(Collectors.toList()); Collections.sort(allRatings, Comparator.nullsLast(Collections.reverseOrder())); idcg = computeDCG(allRatings.subList(0, Math.min(ratingsInSearchHits.size(), allRatings.size()))); if (idcg != 0) { diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvalQueryQuality.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvalQueryQuality.java index 91ba1ce61692b..f065a34787cbe 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvalQueryQuality.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvalQueryQuality.java @@ -41,19 +41,19 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable { private final String queryId; - private final double evaluationResult; + private final double metricScore; private MetricDetail optionalMetricDetails; private final List ratedHits; - public EvalQueryQuality(String id, double evaluationResult) { + public EvalQueryQuality(String id, double metricScore) { this.queryId = id; - this.evaluationResult = evaluationResult; + this.metricScore = metricScore; this.ratedHits = new ArrayList<>(); } public EvalQueryQuality(StreamInput in) throws IOException { this.queryId = in.readString(); - this.evaluationResult = in.readDouble(); + this.metricScore = in.readDouble(); this.ratedHits = in.readList(RatedSearchHit::new); this.optionalMetricDetails = in.readOptionalNamedWriteable(MetricDetail.class); } @@ -61,7 +61,7 @@ public EvalQueryQuality(StreamInput in) throws IOException { // only used for parsing internally private EvalQueryQuality(String queryId, ParsedEvalQueryQuality builder) { this.queryId = queryId; - this.evaluationResult = builder.evaluationResult; + this.metricScore = builder.evaluationResult; this.optionalMetricDetails = builder.optionalMetricDetails; this.ratedHits = builder.ratedHits; } @@ -69,7 +69,7 @@ private EvalQueryQuality(String queryId, ParsedEvalQueryQuality builder) { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(queryId); - out.writeDouble(evaluationResult); + out.writeDouble(metricScore); out.writeList(ratedHits); out.writeOptionalNamedWriteable(this.optionalMetricDetails); } @@ -78,8 +78,8 @@ public String getId() { return queryId; } - public double getQualityLevel() { - return evaluationResult; + public double metricScore() { + return metricScore; } public void setMetricDetails(MetricDetail breakdown) { @@ -101,7 +101,7 @@ public List getHitsAndRatings() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(queryId); - builder.field(QUALITY_LEVEL_FIELD.getPreferredName(), this.evaluationResult); + builder.field(METRIC_SCORE_FIELD.getPreferredName(), this.metricScore); builder.startArray(UNRATED_DOCS_FIELD.getPreferredName()); for (DocumentKey key : EvaluationMetric.filterUnratedDocuments(ratedHits)) { builder.startObject(); @@ -122,7 +122,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - private static final ParseField QUALITY_LEVEL_FIELD = new ParseField("quality_level"); + static final ParseField METRIC_SCORE_FIELD = new ParseField("metric_score"); private static final ParseField UNRATED_DOCS_FIELD = new ParseField("unrated_docs"); private static final ParseField HITS_FIELD = new ParseField("hits"); 
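The DiscountedCumulativeGain change above moves the allRatings computation into the normalize branch, since the ideal DCG is only needed there. As a refresher on what that branch computes, here is a minimal sketch using the standard definition dcg = Σ (2^rel − 1) / log2(rank + 1) with ranks starting at 1; the class and method names are illustrative, not the module's API.

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class NdcgSketch {
    // dcg = sum over ranks of (2^rel - 1) / log2(rank + 1), ranks starting at 1
    static double computeDcg(List<Integer> ratings) {
        double dcg = 0;
        for (int i = 0; i < ratings.size(); i++) {
            dcg += (Math.pow(2, ratings.get(i)) - 1) / (Math.log(i + 2) / Math.log(2));
        }
        return dcg;
    }

    public static void main(String[] args) {
        List<Integer> ratingsInSearchOrder = Arrays.asList(1, 3, 0, 2);
        // The "ideal" ordering sorts the known ratings from best to worst.
        List<Integer> ideal = new ArrayList<>(ratingsInSearchOrder);
        Collections.sort(ideal, Collections.reverseOrder());
        double idcg = computeDcg(ideal);
        System.out.println(idcg != 0 ? computeDcg(ratingsInSearchOrder) / idcg : 0);
    }
}
```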
private static final ParseField METRIC_DETAILS_FIELD = new ParseField("metric_details"); @@ -136,7 +136,7 @@ private static class ParsedEvalQueryQuality { } static { - PARSER.declareDouble((obj, value) -> obj.evaluationResult = value, QUALITY_LEVEL_FIELD); + PARSER.declareDouble((obj, value) -> obj.evaluationResult = value, METRIC_SCORE_FIELD); PARSER.declareObject((obj, value) -> obj.optionalMetricDetails = value, (p, c) -> parseMetricDetail(p), METRIC_DETAILS_FIELD); PARSER.declareObjectArray((obj, list) -> obj.ratedHits = list, (p, c) -> RatedSearchHit.parse(p), HITS_FIELD); @@ -164,13 +164,13 @@ public final boolean equals(Object obj) { } EvalQueryQuality other = (EvalQueryQuality) obj; return Objects.equals(queryId, other.queryId) && - Objects.equals(evaluationResult, other.evaluationResult) && + Objects.equals(metricScore, other.metricScore) && Objects.equals(ratedHits, other.ratedHits) && Objects.equals(optionalMetricDetails, other.optionalMetricDetails); } @Override public final int hashCode() { - return Objects.hash(queryId, evaluationResult, ratedHits, optionalMetricDetails); + return Objects.hash(queryId, metricScore, ratedHits, optionalMetricDetails); } } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java index 37898fd951638..d1e8989047716 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java @@ -39,23 +39,22 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable { /** - * Returns a single metric representing the ranking quality of a set of returned - * documents wrt. to a set of document ids labeled as relevant for this search. + * Evaluates a single ranking evaluation case. * * @param taskId - * the id of the query for which the ranking is currently evaluated + * an identifier of the query for which the search ranking is + * evaluated * @param hits - * the result hits as returned by a search request + * the search result hits * @param ratedDocs - * the documents that were ranked by human annotators for this query - * case - * @return some metric representing the quality of the result hit list wrt. to - * relevant doc ids. + * the documents that contain the document rating for this query case + * @return an {@link EvalQueryQuality} instance that contains the metric score + * with respect to the provided search hits and ratings */ EvalQueryQuality evaluate(String taskId, SearchHit[] hits, List ratedDocs); /** - * join hits with rated documents using the joint _index/_id document key + * Joins hits with rated documents using the joint _index/_id document key. */ static List joinHitsWithRatings(SearchHit[] hits, List ratedDocs) { Map ratedDocumentMap = ratedDocs.stream() @@ -74,7 +73,7 @@ static List joinHitsWithRatings(SearchHit[] hits, List filterUnratedDocuments(List ratedHits) { return ratedHits.stream().filter(hit -> hit.getRating().isPresent() == false) @@ -82,11 +81,11 @@ static List filterUnratedDocuments(List ratedHits) } /** - * how evaluation metrics for particular search queries get combined for the overall evaluation score. - * Defaults to averaging over the partial results. + * Combine several {@link EvalQueryQuality} results into the overall evaluation score. + * This defaults to averaging over the partial results, but can be overwritten to obtain a different behavior. 
*/ default double combine(Collection partialResults) { - return partialResults.stream().mapToDouble(EvalQueryQuality::getQualityLevel).sum() / partialResults.size(); + return partialResults.stream().mapToDouble(EvalQueryQuality::metricScore).sum() / partialResults.size(); } /** diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRank.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRank.java index 4aac29f299d67..39e1266504d9a 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRank.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRank.java @@ -65,6 +65,9 @@ public class ExpectedReciprocalRank implements EvaluationMetric { public static final String NAME = "expected_reciprocal_rank"; + /** + * @param maxRelevance the highest expected relevance in the data + */ public ExpectedReciprocalRank(int maxRelevance) { this(maxRelevance, null, DEFAULT_K); } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/MeanReciprocalRank.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/MeanReciprocalRank.java index eb20dc8c680f9..5781f13dafe0c 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/MeanReciprocalRank.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/MeanReciprocalRank.java @@ -110,8 +110,7 @@ public int getRelevantRatingThreshold() { * Compute ReciprocalRank based on provided relevant document IDs. **/ @Override - public EvalQueryQuality evaluate(String taskId, SearchHit[] hits, - List ratedDocs) { + public EvalQueryQuality evaluate(String taskId, SearchHit[] hits, List ratedDocs) { List ratedHits = joinHitsWithRatings(hits, ratedDocs); int firstRelevant = -1; int rank = 1; diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalNamedXContentProvider.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalNamedXContentProvider.java index f2176113cdf9d..7eddcf9dff644 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalNamedXContentProvider.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalNamedXContentProvider.java @@ -37,12 +37,17 @@ public List getNamedXContentParsers() { MeanReciprocalRank::fromXContent)); namedXContent.add(new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(DiscountedCumulativeGain.NAME), DiscountedCumulativeGain::fromXContent)); + namedXContent.add(new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(ExpectedReciprocalRank.NAME), + ExpectedReciprocalRank::fromXContent)); + namedXContent.add(new NamedXContentRegistry.Entry(MetricDetail.class, new ParseField(PrecisionAtK.NAME), PrecisionAtK.Detail::fromXContent)); namedXContent.add(new NamedXContentRegistry.Entry(MetricDetail.class, new ParseField(MeanReciprocalRank.NAME), MeanReciprocalRank.Detail::fromXContent)); namedXContent.add(new NamedXContentRegistry.Entry(MetricDetail.class, new ParseField(DiscountedCumulativeGain.NAME), DiscountedCumulativeGain.Detail::fromXContent)); + namedXContent.add(new NamedXContentRegistry.Entry(MetricDetail.class, new ParseField(ExpectedReciprocalRank.NAME), + ExpectedReciprocalRank.Detail::fromXContent)); return namedXContent; } } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java 
b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java index 8ac2b7fbee528..0e5d754778f84 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java @@ -60,10 +60,14 @@ public List getNamedWriteables() { namedWriteables.add(new NamedWriteableRegistry.Entry(EvaluationMetric.class, MeanReciprocalRank.NAME, MeanReciprocalRank::new)); namedWriteables.add( new NamedWriteableRegistry.Entry(EvaluationMetric.class, DiscountedCumulativeGain.NAME, DiscountedCumulativeGain::new)); + namedWriteables.add( + new NamedWriteableRegistry.Entry(EvaluationMetric.class, ExpectedReciprocalRank.NAME, ExpectedReciprocalRank::new)); namedWriteables.add(new NamedWriteableRegistry.Entry(MetricDetail.class, PrecisionAtK.NAME, PrecisionAtK.Detail::new)); namedWriteables.add(new NamedWriteableRegistry.Entry(MetricDetail.class, MeanReciprocalRank.NAME, MeanReciprocalRank.Detail::new)); namedWriteables.add( new NamedWriteableRegistry.Entry(MetricDetail.class, DiscountedCumulativeGain.NAME, DiscountedCumulativeGain.Detail::new)); + namedWriteables.add( + new NamedWriteableRegistry.Entry(MetricDetail.class, ExpectedReciprocalRank.NAME, ExpectedReciprocalRank.Detail::new)); return namedWriteables; } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java index 6dd3c1338fa6a..6efff154b6253 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java @@ -48,15 +48,15 @@ public class RankEvalResponse extends ActionResponse implements ToXContentObject { /** The overall evaluation result. 
*/ - private double evaluationResult; + private double metricScore; /** details about individual ranking evaluation queries, keyed by their id */ private Map<String, EvalQueryQuality> details; /** exceptions for specific ranking evaluation queries, keyed by their id */ private Map<String, Exception> failures; - public RankEvalResponse(double qualityLevel, Map<String, EvalQueryQuality> partialResults, + public RankEvalResponse(double metricScore, Map<String, EvalQueryQuality> partialResults, Map<String, Exception> failures) { - this.evaluationResult = qualityLevel; + this.metricScore = metricScore; this.details = new HashMap<>(partialResults); this.failures = new HashMap<>(failures); } @@ -65,8 +65,8 @@ public RankEvalResponse(double qualityLevel, Map parti // only used in RankEvalAction#newResponse() } - public double getEvaluationResult() { - return evaluationResult; + public double getMetricScore() { + return metricScore; } public Map<String, EvalQueryQuality> getPartialResults() { @@ -85,7 +85,7 @@ public String toString() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeDouble(evaluationResult); + out.writeDouble(metricScore); out.writeVInt(details.size()); for (String queryId : details.keySet()) { out.writeString(queryId); @@ -101,7 +101,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - this.evaluationResult = in.readDouble(); + this.metricScore = in.readDouble(); int partialResultSize = in.readVInt(); this.details = new HashMap<>(partialResultSize); for (int i = 0; i < partialResultSize; i++) { @@ -120,7 +120,7 @@ public void readFrom(StreamInput in) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field("quality_level", evaluationResult); + builder.field("metric_score", metricScore); builder.startObject("details"); for (String key : details.keySet()) { details.get(key).toXContent(builder, params); @@ -137,7 +137,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - private static final ParseField QUALITY_LEVEL_FIELD = new ParseField("quality_level"); private static final ParseField DETAILS_FIELD = new ParseField("details"); private static final ParseField FAILURES_FIELD = new ParseField("failures"); @SuppressWarnings("unchecked") @@ -147,7 +146,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws ((List<EvalQueryQuality>) a[1]).stream().collect(Collectors.toMap(EvalQueryQuality::getId, Function.identity())), ((List<Tuple<String, Exception>>) a[2]).stream().collect(Collectors.toMap(Tuple::v1, Tuple::v2)))); static { - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), QUALITY_LEVEL_FIELD); + PARSER.declareDouble(ConstructingObjectParser.constructorArg(), EvalQueryQuality.METRIC_SCORE_FIELD); PARSER.declareNamedObjects(ConstructingObjectParser.optionalConstructorArg(), (p, c, n) -> EvalQueryQuality.fromXContent(p, n), DETAILS_FIELD); PARSER.declareNamedObjects(ConstructingObjectParser.optionalConstructorArg(), (p, c, n) -> { diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java index e768c2973330e..468a1ac2e5721 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -76,7 +76,7 @@
public void testDCGAt() { hits[i].shard(new SearchShardTarget("testnode", new Index("index", "uuid"), 0, null)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); - assertEquals(EXPECTED_DCG, dcg.evaluate("id", hits, rated).getQualityLevel(), DELTA); + assertEquals(EXPECTED_DCG, dcg.evaluate("id", hits, rated).metricScore(), DELTA); /** * Check with normalization: to get the maximal possible dcg, sort documents by @@ -94,7 +94,7 @@ public void testDCGAt() { * idcg = 14.595390756454922 (sum of last column) */ dcg = new DiscountedCumulativeGain(true, null, 10); - assertEquals(EXPECTED_NDCG, dcg.evaluate("id", hits, rated).getQualityLevel(), DELTA); + assertEquals(EXPECTED_NDCG, dcg.evaluate("id", hits, rated).metricScore(), DELTA); } /** @@ -127,7 +127,7 @@ public void testDCGAtSixMissingRatings() { } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); EvalQueryQuality result = dcg.evaluate("id", hits, rated); - assertEquals(12.779642067948913, result.getQualityLevel(), DELTA); + assertEquals(12.779642067948913, result.metricScore(), DELTA); assertEquals(2, filterUnratedDocuments(result.getHitsAndRatings()).size()); /** @@ -146,7 +146,7 @@ public void testDCGAtSixMissingRatings() { * idcg = 13.347184833073591 (sum of last column) */ dcg = new DiscountedCumulativeGain(true, null, 10); - assertEquals(12.779642067948913 / 13.347184833073591, dcg.evaluate("id", hits, rated).getQualityLevel(), DELTA); + assertEquals(12.779642067948913 / 13.347184833073591, dcg.evaluate("id", hits, rated).metricScore(), DELTA); } /** @@ -184,7 +184,7 @@ public void testDCGAtFourMoreRatings() { } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs); - assertEquals(12.392789260714371, result.getQualityLevel(), DELTA); + assertEquals(12.392789260714371, result.metricScore(), DELTA); assertEquals(1, filterUnratedDocuments(result.getHitsAndRatings()).size()); /** @@ -204,7 +204,7 @@ public void testDCGAtFourMoreRatings() { * idcg = 13.347184833073591 (sum of last column) */ dcg = new DiscountedCumulativeGain(true, null, 10); - assertEquals(12.392789260714371 / 13.347184833073591, dcg.evaluate("id", hits, ratedDocs).getQualityLevel(), DELTA); + assertEquals(12.392789260714371 / 13.347184833073591, dcg.evaluate("id", hits, ratedDocs).metricScore(), DELTA); } /** @@ -223,13 +223,13 @@ public void testNoResults() throws Exception { SearchHit[] hits = new SearchHit[0]; DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs); - assertEquals(0.0d, result.getQualityLevel(), DELTA); + assertEquals(0.0d, result.metricScore(), DELTA); assertEquals(0, filterUnratedDocuments(result.getHitsAndRatings()).size()); // also check normalized dcg = new DiscountedCumulativeGain(true, null, 10); result = dcg.evaluate("id", hits, ratedDocs); - assertEquals(0.0d, result.getQualityLevel(), DELTA); + assertEquals(0.0d, result.metricScore(), DELTA); assertEquals(0, filterUnratedDocuments(result.getHitsAndRatings()).size()); } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java index c9251bb80903d..7424542ac26aa 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java @@ -129,7 +129,7 @@ public 
void testEqualsAndHash() throws IOException { private static EvalQueryQuality mutateTestItem(EvalQueryQuality original) { String id = original.getId(); - double qualityLevel = original.getQualityLevel(); + double metricScore = original.metricScore(); List ratedHits = new ArrayList<>(original.getHitsAndRatings()); MetricDetail metricDetails = original.getMetricDetails(); switch (randomIntBetween(0, 3)) { @@ -137,7 +137,7 @@ private static EvalQueryQuality mutateTestItem(EvalQueryQuality original) { id = id + "_"; break; case 1: - qualityLevel = qualityLevel + 0.1; + metricScore = metricScore + 0.1; break; case 2: if (metricDetails == null) { @@ -152,7 +152,7 @@ private static EvalQueryQuality mutateTestItem(EvalQueryQuality original) { default: throw new IllegalStateException("The test should only allow four parameters mutated"); } - EvalQueryQuality evalQueryQuality = new EvalQueryQuality(id, qualityLevel); + EvalQueryQuality evalQueryQuality = new EvalQueryQuality(id, metricScore); evalQueryQuality.setMetricDetails(metricDetails); evalQueryQuality.addHitsAndRatings(ratedHits); return evalQueryQuality; diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java index e2be8696e66f1..fe33c246f7d7a 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java @@ -76,10 +76,10 @@ public void testERRAt() { Integer[] relevanceRatings = new Integer[] { 3, 2, 0, 1}; SearchHit[] hits = createSearchHits(rated, relevanceRatings); ExpectedReciprocalRank err = new ExpectedReciprocalRank(3, 0, 3); - assertEquals(0.8984375, err.evaluate("id", hits, rated).getQualityLevel(), DELTA); + assertEquals(0.8984375, err.evaluate("id", hits, rated).metricScore(), DELTA); // take 4th rank into window err = new ExpectedReciprocalRank(3, 0, 4); - assertEquals(0.8984375 + 0.00244140625, err.evaluate("id", hits, rated).getQualityLevel(), DELTA); + assertEquals(0.8984375 + 0.00244140625, err.evaluate("id", hits, rated).metricScore(), DELTA); } /** @@ -102,11 +102,11 @@ public void testERRMissingRatings() { SearchHit[] hits = createSearchHits(rated, relevanceRatings); ExpectedReciprocalRank err = new ExpectedReciprocalRank(3, null, 4); EvalQueryQuality evaluation = err.evaluate("id", hits, rated); - assertEquals(0.875 + 0.00390625, evaluation.getQualityLevel(), DELTA); + assertEquals(0.875 + 0.00390625, evaluation.metricScore(), DELTA); assertEquals(1, ((ExpectedReciprocalRank.Detail) evaluation.getMetricDetails()).getUnratedDocs()); // if we supply e.g. 
2 as unknown docs rating, it should be the same as in the other test above err = new ExpectedReciprocalRank(3, 2, 4); - assertEquals(0.8984375 + 0.00244140625, err.evaluate("id", hits, rated).getQualityLevel(), DELTA); + assertEquals(0.8984375 + 0.00244140625, err.evaluate("id", hits, rated).metricScore(), DELTA); } private SearchHit[] createSearchHits(List rated, Integer[] relevanceRatings) { @@ -126,7 +126,7 @@ private SearchHit[] createSearchHits(List rated, Integer[] releva */ public void testNoResults() throws Exception { ExpectedReciprocalRank err = new ExpectedReciprocalRank(5, 0, 10); - assertEquals(0.0, err.evaluate("id", new SearchHit[0], Collections.emptyList()).getQualityLevel(), DELTA); + assertEquals(0.0, err.evaluate("id", new SearchHit[0], Collections.emptyList()).metricScore(), DELTA); } public void testParseFromXContent() throws IOException { diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java index f88b0cc663489..fdb64806d5c9e 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java @@ -95,14 +95,14 @@ public void testMaxAcceptableRank() { int rankAtFirstRelevant = relevantAt + 1; EvalQueryQuality evaluation = reciprocalRank.evaluate("id", hits, ratedDocs); - assertEquals(1.0 / rankAtFirstRelevant, evaluation.getQualityLevel(), Double.MIN_VALUE); + assertEquals(1.0 / rankAtFirstRelevant, evaluation.metricScore(), Double.MIN_VALUE); assertEquals(rankAtFirstRelevant, ((MeanReciprocalRank.Detail) evaluation.getMetricDetails()).getFirstRelevantRank()); // check that if we have fewer search hits than relevant doc position, - // we don't find any result and get 0.0 quality level + // we don't find any result and get 0.0 score reciprocalRank = new MeanReciprocalRank(); evaluation = reciprocalRank.evaluate("id", Arrays.copyOfRange(hits, 0, relevantAt), ratedDocs); - assertEquals(0.0, evaluation.getQualityLevel(), Double.MIN_VALUE); + assertEquals(0.0, evaluation.metricScore(), Double.MIN_VALUE); } public void testEvaluationOneRelevantInResults() { @@ -120,7 +120,7 @@ public void testEvaluationOneRelevantInResults() { } EvalQueryQuality evaluation = reciprocalRank.evaluate("id", hits, ratedDocs); - assertEquals(1.0 / (relevantAt + 1), evaluation.getQualityLevel(), Double.MIN_VALUE); + assertEquals(1.0 / (relevantAt + 1), evaluation.metricScore(), Double.MIN_VALUE); assertEquals(relevantAt + 1, ((MeanReciprocalRank.Detail) evaluation.getMetricDetails()).getFirstRelevantRank()); } @@ -140,7 +140,7 @@ public void testPrecisionAtFiveRelevanceThreshold() { MeanReciprocalRank reciprocalRank = new MeanReciprocalRank(2, 10); EvalQueryQuality evaluation = reciprocalRank.evaluate("id", hits, rated); - assertEquals((double) 1 / 3, evaluation.getQualityLevel(), 0.00001); + assertEquals((double) 1 / 3, evaluation.metricScore(), 0.00001); assertEquals(3, ((MeanReciprocalRank.Detail) evaluation.getMetricDetails()).getFirstRelevantRank()); } @@ -158,13 +158,13 @@ public void testEvaluationNoRelevantInResults() { SearchHit[] hits = createSearchHits(0, 9, "test"); List ratedDocs = new ArrayList<>(); EvalQueryQuality evaluation = reciprocalRank.evaluate("id", hits, ratedDocs); - assertEquals(0.0, evaluation.getQualityLevel(), Double.MIN_VALUE); + assertEquals(0.0, evaluation.metricScore(), 
Double.MIN_VALUE); } public void testNoResults() throws Exception { SearchHit[] hits = new SearchHit[0]; EvalQueryQuality evaluated = (new MeanReciprocalRank()).evaluate("id", hits, Collections.emptyList()); - assertEquals(0.0d, evaluated.getQualityLevel(), 0.00001); + assertEquals(0.0d, evaluated.metricScore(), 0.00001); assertEquals(-1, ((MeanReciprocalRank.Detail) evaluated.getMetricDetails()).getFirstRelevantRank()); } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java index c0035d5dbb72e..73149d5a8aa7e 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java @@ -53,7 +53,7 @@ public void testPrecisionAtFiveCalculation() { List rated = new ArrayList<>(); rated.add(createRatedDoc("test", "0", RELEVANT_RATING_1)); EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", toSearchHits(rated, "test"), rated); - assertEquals(1, evaluated.getQualityLevel(), 0.00001); + assertEquals(1, evaluated.metricScore(), 0.00001); assertEquals(1, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); assertEquals(1, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); } @@ -66,7 +66,7 @@ public void testPrecisionAtFiveIgnoreOneResult() { rated.add(createRatedDoc("test", "3", RELEVANT_RATING_1)); rated.add(createRatedDoc("test", "4", IRRELEVANT_RATING_0)); EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", toSearchHits(rated, "test"), rated); - assertEquals((double) 4 / 5, evaluated.getQualityLevel(), 0.00001); + assertEquals((double) 4 / 5, evaluated.metricScore(), 0.00001); assertEquals(4, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); assertEquals(5, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); } @@ -85,7 +85,7 @@ public void testPrecisionAtFiveRelevanceThreshold() { rated.add(createRatedDoc("test", "4", 4)); PrecisionAtK precisionAtN = new PrecisionAtK(2, false, 5); EvalQueryQuality evaluated = precisionAtN.evaluate("id", toSearchHits(rated, "test"), rated); - assertEquals((double) 3 / 5, evaluated.getQualityLevel(), 0.00001); + assertEquals((double) 3 / 5, evaluated.metricScore(), 0.00001); assertEquals(3, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); assertEquals(5, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); } @@ -99,7 +99,7 @@ public void testPrecisionAtFiveCorrectIndex() { rated.add(createRatedDoc("test", "2", IRRELEVANT_RATING_0)); // the following search hits contain only the last three documents EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", toSearchHits(rated.subList(2, 5), "test"), rated); - assertEquals((double) 2 / 3, evaluated.getQualityLevel(), 0.00001); + assertEquals((double) 2 / 3, evaluated.metricScore(), 0.00001); assertEquals(2, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); assertEquals(3, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); } @@ -114,14 +114,14 @@ public void testIgnoreUnlabeled() { searchHits[2].shard(new SearchShardTarget("testnode", new Index("index", "uuid"), 0, null)); EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated); - assertEquals((double) 2 / 3, evaluated.getQualityLevel(), 0.00001); + assertEquals((double) 2 / 
3, evaluated.metricScore(), 0.00001); assertEquals(2, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); assertEquals(3, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); // also try with setting `ignore_unlabeled` PrecisionAtK prec = new PrecisionAtK(1, true, 10); evaluated = prec.evaluate("id", searchHits, rated); - assertEquals((double) 2 / 2, evaluated.getQualityLevel(), 0.00001); + assertEquals((double) 2 / 2, evaluated.metricScore(), 0.00001); assertEquals(2, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); assertEquals(2, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); } @@ -133,14 +133,14 @@ public void testNoRatedDocs() throws Exception { hits[i].shard(new SearchShardTarget("testnode", new Index("index", "uuid"), 0, null)); } EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList()); - assertEquals(0.0d, evaluated.getQualityLevel(), 0.00001); + assertEquals(0.0d, evaluated.metricScore(), 0.00001); assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); assertEquals(5, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); // also try with setting `ignore_unlabeled` PrecisionAtK prec = new PrecisionAtK(1, true, 10); evaluated = prec.evaluate("id", hits, Collections.emptyList()); - assertEquals(0.0d, evaluated.getQualityLevel(), 0.00001); + assertEquals(0.0d, evaluated.metricScore(), 0.00001); assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); } @@ -148,7 +148,7 @@ public void testNoRatedDocs() throws Exception { public void testNoResults() throws Exception { SearchHit[] hits = new SearchHit[0]; EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList()); - assertEquals(0.0d, evaluated.getQualityLevel(), 0.00001); + assertEquals(0.0d, evaluated.metricScore(), 0.00001); assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java index 28200e7d5a0e6..7d594c852da5b 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java @@ -114,7 +114,7 @@ public void testPrecisionAtRequest() { // the expected Prec@ for the first query is 4/6 and the expected Prec@ for the // second is 1/6, divided by 2 to get the average double expectedPrecision = (1.0 / 6.0 + 4.0 / 6.0) / 2.0; - assertEquals(expectedPrecision, response.getEvaluationResult(), Double.MIN_VALUE); + assertEquals(expectedPrecision, response.getMetricScore(), Double.MIN_VALUE); Set<Entry<String, EvalQueryQuality>> entrySet = response.getPartialResults().entrySet(); assertEquals(2, entrySet.size()); for (Entry<String, EvalQueryQuality> entry : entrySet) { @@ -157,7 +157,7 @@ public void testPrecisionAtRequest() { // if we look only at top 3 documents, the expected P@3 for the first query is // 2/3 and the expected Prec@ for the second is 1/3, divided by 2 to get the average expectedPrecision = (1.0 / 3.0 + 2.0 / 3.0) / 2.0; - assertEquals(expectedPrecision, response.getEvaluationResult(), Double.MIN_VALUE);
+ assertEquals(expectedPrecision, response.getMetricScore(), Double.MIN_VALUE); } /** @@ -186,7 +186,7 @@ public void testDCGRequest() { new RankEvalRequest(task, new String[] { TEST_INDEX })); RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); - assertEquals(DiscountedCumulativeGainTests.EXPECTED_DCG, response.getEvaluationResult(), 10E-14); + assertEquals(DiscountedCumulativeGainTests.EXPECTED_DCG, response.getMetricScore(), 10E-14); // test that a different window size k affects the result metric = new DiscountedCumulativeGain(false, null, 3); @@ -195,7 +195,7 @@ public void testDCGRequest() { builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest(task, new String[] { TEST_INDEX })); response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); - assertEquals(12.39278926071437, response.getEvaluationResult(), 10E-14); + assertEquals(12.39278926071437, response.getMetricScore(), 10E-14); } public void testMRRRequest() { @@ -218,7 +218,7 @@ public void testMRRRequest() { // the expected reciprocal rank for the berlin_query is 1/1 // dividing by 2 to get the average double expectedMRR = (1.0 + 1.0 / 5.0) / 2.0; - assertEquals(expectedMRR, response.getEvaluationResult(), 0.0); + assertEquals(expectedMRR, response.getMetricScore(), 0.0); // test that a different window size k affects the result metric = new MeanReciprocalRank(1, 3); @@ -231,7 +231,7 @@ public void testMRRRequest() { // the reciprocal rank for the berlin_query is 1/1 // dividing by 2 to get the average expectedMRR = 1.0 / 2.0; - assertEquals(expectedMRR, response.getEvaluationResult(), 0.0); + assertEquals(expectedMRR, response.getMetricScore(), 0.0); } /** diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java index 1e94e869d2594..673808f836976 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java @@ -102,7 +102,7 @@ public void testSerialization() throws IOException { try (StreamInput in = output.bytes().streamInput()) { RankEvalResponse deserializedResponse = new RankEvalResponse(); deserializedResponse.readFrom(in); - assertEquals(randomResponse.getEvaluationResult(), deserializedResponse.getEvaluationResult(), Double.MIN_VALUE); + assertEquals(randomResponse.getMetricScore(), deserializedResponse.getMetricScore(), Double.MIN_VALUE); assertEquals(randomResponse.getPartialResults(), deserializedResponse.getPartialResults()); assertEquals(randomResponse.getFailures().keySet(), deserializedResponse.getFailures().keySet()); assertNotSame(randomResponse, deserializedResponse); @@ -130,7 +130,7 @@ public void testXContentParsing() throws IOException { assertNotSame(testItem, parsedItem); // We cannot check equality of object here because some information (e.g. // SearchHit#shard) cannot fully be parsed back. 
- assertEquals(testItem.getEvaluationResult(), parsedItem.getEvaluationResult(), 0.0); + assertEquals(testItem.getMetricScore(), parsedItem.getMetricScore(), 0.0); assertEquals(testItem.getPartialResults().keySet(), parsedItem.getPartialResults().keySet()); for (EvalQueryQuality metricDetail : testItem.getPartialResults().values()) { EvalQueryQuality parsedEvalQueryQuality = parsedItem.getPartialResults().get(metricDetail.getId()); @@ -154,10 +154,10 @@ public void testToXContent() throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); String xContent = BytesReference.bytes(response.toXContent(builder, ToXContent.EMPTY_PARAMS)).utf8ToString(); assertEquals(("{" + - " \"quality_level\": 0.123," + + " \"metric_score\": 0.123," + " \"details\": {" + " \"coffee_query\": {" + - " \"quality_level\": 0.1," + + " \"metric_score\": 0.1," + " \"unrated_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," + " \"hits\":[{\"hit\":{\"_index\":\"index\",\"_type\":\"\",\"_id\":\"123\",\"_score\":1.0}," + " \"rating\":5}," + diff --git a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml index 62c246fb32066..ebe23ae53f411 100644 --- a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml +++ b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml @@ -71,8 +71,8 @@ setup: "metric" : { "precision": { "ignore_unlabeled" : true }} } - - match: { quality_level: 1} - - match: { details.amsterdam_query.quality_level: 1.0} + - match: { metric_score: 1} + - match: { details.amsterdam_query.metric_score: 1.0} - match: { details.amsterdam_query.unrated_docs: [ {"_index": "foo", "_id": "doc4"}]} - match: { details.amsterdam_query.metric_details.precision: {"relevant_docs_retrieved": 2, "docs_retrieved": 2}} @@ -84,7 +84,7 @@ setup: - match: { details.amsterdam_query.hits.2.hit._id: "doc4"} - is_false: details.amsterdam_query.hits.2.rating - - match: { details.berlin_query.quality_level: 1.0} + - match: { details.berlin_query.metric_score: 1.0} - match: { details.berlin_query.unrated_docs: [ {"_index": "foo", "_id": "doc4"}]} - match: { details.berlin_query.metric_details.precision: {"relevant_docs_retrieved": 1, "docs_retrieved": 1}} - length: { details.berlin_query.hits: 2} @@ -118,9 +118,9 @@ setup: "metric" : { "precision": { "ignore_unlabeled" : true }} } - - match: { quality_level: 1} - - match: { details.amsterdam_query.quality_level: 1.0} - - match: { details.berlin_query.quality_level: 1.0} + - match: { metric_score: 1} + - match: { details.amsterdam_query.metric_score: 1.0} + - match: { details.berlin_query.metric_score: 1.0} --- "Mean Reciprocal Rank": @@ -150,14 +150,48 @@ setup: } # average is (1/3 + 1/2)/2 = 5/12 ~ 0.41666666666666663 - - gt: {quality_level: 0.416} - - lt: {quality_level: 0.417} - - gt: {details.amsterdam_query.quality_level: 0.333} - - lt: {details.amsterdam_query.quality_level: 0.334} + - gt: {metric_score: 0.416} + - lt: {metric_score: 0.417} + - gt: {details.amsterdam_query.metric_score: 0.333} + - lt: {details.amsterdam_query.metric_score: 0.334} - match: {details.amsterdam_query.metric_details.mean_reciprocal_rank: {"first_relevant": 3}} - match: {details.amsterdam_query.unrated_docs: [ {"_index": "foo", "_id": "doc2"}, {"_index": "foo", "_id": "doc3"} ]} - - match: {details.berlin_query.quality_level: 0.5} + - match: {details.berlin_query.metric_score: 0.5} - match: 
{details.berlin_query.metric_details.mean_reciprocal_rank: {"first_relevant": 2}} - match: {details.berlin_query.unrated_docs: [ {"_index": "foo", "_id": "doc1"}]} +--- +"Expected Reciprocal Rank": + + - skip: + version: " - 6.3.99" + reason: ERR was introduced in 6.4 + + - do: + rank_eval: + body: { + "requests" : [ + { + "id": "amsterdam_query", + "request": { "query": { "match" : {"text" : "amsterdam" }}}, + "ratings": [{"_index": "foo", "_id": "doc4", "rating": 1}] + }, + { + "id" : "berlin_query", + "request": { "query": { "match" : { "text" : "berlin" } }, "size" : 10 }, + "ratings": [{"_index": "foo", "_id": "doc4", "rating": 1}] + } + ], + "metric" : { + "expected_reciprocal_rank": { + "maximum_relevance" : 1, + "k" : 5 + } + } + } + + - gt: {metric_score: 0.2083333} + - lt: {metric_score: 0.2083334} + - match: {details.amsterdam_query.metric_details.expected_reciprocal_rank.unrated_docs: 2} + - match: {details.berlin_query.metric_details.expected_reciprocal_rank.unrated_docs: 1} diff --git a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/20_dcg.yml b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/20_dcg.yml index baf10f1542cfb..1b159775d5c94 100644 --- a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/20_dcg.yml +++ b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/20_dcg.yml @@ -69,10 +69,10 @@ "metric" : { "dcg": {}} } - - gt: {quality_level: 13.848263 } - - lt: {quality_level: 13.848264 } - - gt: {details.dcg_query.quality_level: 13.848263} - - lt: {details.dcg_query.quality_level: 13.848264} + - gt: {metric_score: 13.848263 } + - lt: {metric_score: 13.848264 } + - gt: {details.dcg_query.metric_score: 13.848263} + - lt: {details.dcg_query.metric_score: 13.848264} - match: {details.dcg_query.unrated_docs: [ ]} # reverse the order in which the results are returned (less relevant docs first) @@ -96,10 +96,10 @@ "metric" : { "dcg": { }} } - - gt: {quality_level: 10.299674} - - lt: {quality_level: 10.299675} - - gt: {details.dcg_query_reverse.quality_level: 10.299674} - - lt: {details.dcg_query_reverse.quality_level: 10.299675} + - gt: {metric_score: 10.299674} + - lt: {metric_score: 10.299675} + - gt: {details.dcg_query_reverse.metric_score: 10.299674} + - lt: {details.dcg_query_reverse.metric_score: 10.299675} - match: {details.dcg_query_reverse.unrated_docs: [ ]} # if we mix both, we should get the average @@ -134,11 +134,11 @@ "metric" : { "dcg": { }} } - - gt: {quality_level: 12.073969} - - lt: {quality_level: 12.073970} - - gt: {details.dcg_query.quality_level: 13.848263} - - lt: {details.dcg_query.quality_level: 13.848264} + - gt: {metric_score: 12.073969} + - lt: {metric_score: 12.073970} + - gt: {details.dcg_query.metric_score: 13.848263} + - lt: {details.dcg_query.metric_score: 13.848264} - match: {details.dcg_query.unrated_docs: [ ]} - - gt: {details.dcg_query_reverse.quality_level: 10.299674} - - lt: {details.dcg_query_reverse.quality_level: 10.299675} + - gt: {details.dcg_query_reverse.metric_score: 10.299674} + - lt: {details.dcg_query_reverse.metric_score: 10.299675} - match: {details.dcg_query_reverse.unrated_docs: [ ]} diff --git a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/30_failures.yml b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/30_failures.yml index d6119ad3a9e95..42627a2590e1c 100644 --- a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/30_failures.yml +++ 
b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/30_failures.yml @@ -34,8 +34,8 @@ "metric" : { "precision": { "ignore_unlabeled" : true }} } - - match: { quality_level: 1} - - match: { details.amsterdam_query.quality_level: 1.0} + - match: { metric_score: 1} + - match: { details.amsterdam_query.metric_score: 1.0} - match: { details.amsterdam_query.unrated_docs: [ ]} - match: { details.amsterdam_query.metric_details.precision: {"relevant_docs_retrieved": 1, "docs_retrieved": 1}} diff --git a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml index 5e0082d213c90..fef25c3fc41a5 100644 --- a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml +++ b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml @@ -84,7 +84,7 @@ setup: "metric" : { "precision": { }} } - - match: {quality_level: 0.9} + - match: {metric_score: 0.9} - match: {details.amsterdam_query.unrated_docs.0._id: "6"} --- diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java index 8f09afbb17c6c..bf0adc6e1429f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java @@ -57,7 +57,6 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client } @Override - @SuppressWarnings("unchecked") protected UpdateByQueryRequest buildRequest(RestRequest request) throws IOException { /* * Passing the search request through UpdateByQueryRequest first allows diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java index 3101f660d056e..50be7f7ce4509 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -181,27 +181,32 @@ class PossiblySlowRunnable implements Runnable { @Override public void run() { - final String uri = fullHttpRequest.uri(); - - final ByteBuf buffer = Unpooled.copiedBuffer(uri, StandardCharsets.UTF_8); - - Netty4HttpRequest httpRequest = new Netty4HttpRequest(fullHttpRequest, pipelinedRequest.getSequence()); - Netty4HttpResponse response = httpRequest.createResponse(RestStatus.OK, new BytesArray(uri.getBytes(StandardCharsets.UTF_8))); - response.headers().add(HttpHeaderNames.CONTENT_LENGTH, buffer.readableBytes()); - - final boolean slow = uri.matches("/slow/\\d+"); - if (slow) { - try { - Thread.sleep(scaledRandomIntBetween(500, 1000)); - } catch (InterruptedException e) { - throw new RuntimeException(e); + try { + final String uri = fullHttpRequest.uri(); + + final ByteBuf buffer = Unpooled.copiedBuffer(uri, StandardCharsets.UTF_8); + + Netty4HttpRequest httpRequest = new Netty4HttpRequest(fullHttpRequest, pipelinedRequest.getSequence()); + Netty4HttpResponse response = + httpRequest.createResponse(RestStatus.OK, new BytesArray(uri.getBytes(StandardCharsets.UTF_8))); + response.headers().add(HttpHeaderNames.CONTENT_LENGTH, buffer.readableBytes()); + + 
final boolean slow = uri.matches("/slow/\\d+"); + if (slow) { + try { + Thread.sleep(scaledRandomIntBetween(500, 1000)); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } else { + assert uri.matches("/\\d+"); } - } else { - assert uri.matches("/\\d+"); - } - final ChannelPromise promise = ctx.newPromise(); - ctx.writeAndFlush(response, promise); + final ChannelPromise promise = ctx.newPromise(); + ctx.writeAndFlush(response, promise); + } finally { + fullHttpRequest.release(); + } } } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java index 760ac1253c6fe..8d628ace2ee38 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java @@ -90,7 +90,6 @@ protected MockTransportService build(Settings settings, Version version, Cluster @Override protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { final Netty4Transport t = (Netty4Transport) transport; - @SuppressWarnings("unchecked") final TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } diff --git a/plugins/analysis-phonetic/src/test/resources/org/elasticsearch/index/analysis/phonetic-1.yml b/plugins/analysis-phonetic/src/test/resources/org/elasticsearch/index/analysis/phonetic-1.yml index 1909c7ee06390..14cdbbe2440ae 100644 --- a/plugins/analysis-phonetic/src/test/resources/org/elasticsearch/index/analysis/phonetic-1.yml +++ b/plugins/analysis-phonetic/src/test/resources/org/elasticsearch/index/analysis/phonetic-1.yml @@ -3,7 +3,7 @@ index: filter: doublemetaphonefilter: type: phonetic - encoder: doublemetaphone + encoder: double_metaphone metaphonefilter: type: phonetic encoder: metaphone @@ -12,16 +12,16 @@ index: encoder: soundex refinedsoundexfilter: type: phonetic - encoder: refinedsoundex + encoder: refined_soundex caverphonefilter: type: phonetic encoder: caverphone beidermorsefilter: type: phonetic - encoder: beidermorse + encoder: beider_morse beidermorsefilterfrench: type: phonetic - encoder: beidermorse + encoder: beider_morse languageset : [ "french" ] koelnerphonetikfilter: type: phonetic diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index 82de9ba031b25..fa8005dfa4759 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -22,36 +22,6 @@ dependencies { compile "commons-codec:commons-codec:${versions.commonscodec}" } - -// needed to be consistent with ssl host checking -String host = InetAddress.getLoopbackAddress().getHostAddress(); - -// location of keystore and files to generate it -File keystore = new File(project.buildDir, 'keystore/test-node.jks') - -// generate the keystore -task createKey(type: LoggedExec) { - doFirst { - project.delete(keystore.parentFile) - keystore.parentFile.mkdirs() - } - executable = new File(project.runtimeJavaHome, 'bin/keytool') - standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8')) - args '-genkey', - '-alias', 'test-node', - '-keystore', keystore, - '-keyalg', 'RSA', - 
'-keysize', '2048', - '-validity', '712', - '-dname', 'CN=' + host, - '-keypass', 'keypass', - '-storepass', 'keypass' -} - -// add keystore to test classpath: it expects it there -sourceSets.test.resources.srcDir(keystore.parentFile) -processTestResources.dependsOn(createKey) - dependencyLicenses { mapping from: /google-.*/, to: 'google' } diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index b3ae34012aa38..8856ae1526a21 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -1,4 +1,4 @@ -1/* +/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright @@ -214,25 +214,6 @@ RestIntegTestTask integTestSecureHa = project.tasks.create('integTestSecureHa', description = "Runs rest tests against an elasticsearch cluster with HDFS configured with HA Namenode and secured by MIT Kerberos." } -if (rootProject.ext.compilerJavaVersion.isJava11()) { - // TODO remove when: https://github.com/elastic/elasticsearch/issues/31498 - integTestRunner { - systemProperty 'tests.rest.blacklist', [ - 'hdfs_repository/30_snapshot/take snapshot', - 'hdfs_repository/40_restore/Create a snapshot and then restore it', - 'hdfs_repository/20_repository_verify/HDFS Repository Verify', - 'hdfs_repository/30_snapshot_get/Get a snapshot', - 'hdfs_repository/20_repository_create/HDFS Repository Creation', - 'hdfs_repository/20_repository_delete/HDFS Delete Repository', - 'hdfs_repository/30_snapshot_readonly/Get a snapshot - readonly', - ].join(',') - } -} -if (rootProject.ext.runtimeJavaVersion.isJava11() || rootProject.ext.compilerJavaVersion.isJava11()) { - // TODO remove when: https://github.com/elastic/elasticsearch/issues/31498 - integTestHa.enabled = false -} - // Determine HDFS Fixture compatibility for the current build environment. 
boolean fixtureSupported = false if (Os.isFamily(Os.FAMILY_WINDOWS)) { diff --git a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy index f6476f290bc34..897596bbd8546 100644 --- a/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy @@ -61,6 +61,7 @@ grant { // Hadoop depends on OS level user information for simple authentication // Unix: UnixLoginModule: com.sun.security.auth.module.UnixSystem.UnixSystem init + permission java.lang.RuntimePermission "loadLibrary.jaas"; permission java.lang.RuntimePermission "loadLibrary.jaas_unix"; // Windows: NTLoginModule: com.sun.security.auth.module.NTSystem.loadNative permission java.lang.RuntimePermission "loadLibrary.jaas_nt"; diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 181891e20564d..13119913672af 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -114,9 +114,7 @@ if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3P useFixture = true -} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath - || !s3EC2Bucket || !s3EC2BasePath - || !s3ECSBucket || !s3ECSBasePath) { +} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath) { throw new IllegalArgumentException("not all options specified to run against external S3 service") } @@ -349,8 +347,13 @@ processTestResources { project.afterEvaluate { if (useFixture == false) { - // 30_repository_temporary_credentials is not ready for CI yet - integTestRunner.systemProperty 'tests.rest.blacklist', 'repository_s3/30_repository_temporary_credentials/*' + // temporary_credentials, ec2_credentials and ecs_credentials are not ready for third-party-tests yet + integTestRunner.systemProperty 'tests.rest.blacklist', + [ + 'repository_s3/30_repository_temporary_credentials/*', + 'repository_s3/40_repository_ec2_credentials/*', + 'repository_s3/50_repository_ecs_credentials/*' + ].join(",") } } diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java index 5bda7e1b83d81..0a09b6b8789f7 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java @@ -32,6 +32,7 @@ import io.netty.handler.codec.http.HttpResponseStatus; import io.netty.handler.codec.http.HttpUtil; import io.netty.handler.codec.http.HttpVersion; + import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -89,7 +90,6 @@ public class HttpReadWriteHandlerTests extends ESTestCase { private final ResponseDecoder responseDecoder = new ResponseDecoder(); @Before - @SuppressWarnings("unchecked") public void setMocks() { transport = mock(NioHttpServerTransport.class); Settings settings = Settings.EMPTY; diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java index 090fc579c4899..9322bfd71222a 100644 --- 
a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java @@ -95,7 +95,6 @@ protected MockTransportService build(Settings settings, Version version, Cluster @Override protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { - @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index de5681ebe1a29..062016909b651 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -47,6 +47,8 @@ * In depth testing of the recovery mechanism during a rolling restart. */ public class RecoveryIT extends AbstractRollingTestCase { + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31291") public void testHistoryUUIDIsGenerated() throws Exception { final String index = "index_history_uuid"; if (CLUSTER_TYPE == ClusterType.OLD) { diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java index 2056111554225..83edc8a0a9390 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java @@ -129,7 +129,7 @@ public void test30AbortWhenJavaMissing() { }); Platforms.onLinux(() -> { - final String javaPath = sh.run("which java").stdout.trim(); + final String javaPath = sh.run("command -v java").stdout.trim(); try { sh.run("chmod -x '" + javaPath + "'"); diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java index 28a767e95aef2..95705e94a432c 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java @@ -30,16 +30,20 @@ import java.io.IOException; import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.regex.Matcher; import java.util.regex.Pattern; import static org.elasticsearch.packaging.util.Cleanup.cleanEverything; import static org.elasticsearch.packaging.util.FileUtils.assertPathsDontExist; +import static org.elasticsearch.packaging.util.FileUtils.mv; import static org.elasticsearch.packaging.util.Packages.SYSTEMD_SERVICE; import static org.elasticsearch.packaging.util.Packages.assertInstalled; import static org.elasticsearch.packaging.util.Packages.assertRemoved; import static org.elasticsearch.packaging.util.Packages.install; import static org.elasticsearch.packaging.util.Packages.remove; +import static org.elasticsearch.packaging.util.Packages.runInstallCommand; import static org.elasticsearch.packaging.util.Packages.startElasticsearch; import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation; import static org.elasticsearch.packaging.util.Platforms.getOsRelease; @@ -75,6 +79,21 @@ public void 
onlyCompatibleDistributions() { assumeTrue("only compatible distributions", distribution().packaging.compatible); } + public void test05InstallFailsWhenJavaMissing() { + final Shell sh = new Shell(); + final Result java = sh.run("command -v java"); + + final Path originalJavaPath = Paths.get(java.stdout.trim()); + final Path relocatedJavaPath = originalJavaPath.getParent().resolve("java.relocated"); + try { + mv(originalJavaPath, relocatedJavaPath); + final Result installResult = runInstallCommand(distribution()); + assertThat(installResult.exitCode, is(1)); + } finally { + mv(relocatedJavaPath, originalJavaPath); + } + } + public void test10InstallPackage() { assertRemoved(distribution()); installation = install(distribution()); diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java index 6e80d9e027df2..be7edc5e8f9e4 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java @@ -67,7 +67,10 @@ public static void assertRemoved(Distribution distribution) { Platforms.onDPKG(() -> { assertThat(status.exitCode, anyOf(is(0), is(1))); if (status.exitCode == 0) { - assertTrue(Pattern.compile("(?m)^Status:.+deinstall ok").matcher(status.stdout).find()); + assertTrue("an uninstalled status should be indicated: " + status.stdout, + Pattern.compile("(?m)^Status:.+deinstall ok").matcher(status.stdout).find() || + Pattern.compile("(?m)^Status:.+ok not-installed").matcher(status.stdout).find() + ); } }); } @@ -90,13 +93,27 @@ public static Installation install(Distribution distribution) { } public static Installation install(Distribution distribution, String version) { + final Result result = runInstallCommand(distribution, version); + if (result.exitCode != 0) { + throw new RuntimeException("Installing distribution " + distribution + " version " + version + " failed: " + result); + } + + return Installation.ofPackage(distribution.packaging); + } + + public static Result runInstallCommand(Distribution distribution) { + return runInstallCommand(distribution, getCurrentVersion()); + } + + public static Result runInstallCommand(Distribution distribution, String version) { final Shell sh = new Shell(); final Path distributionFile = getDistributionFile(distribution, version); - Platforms.onRPM(() -> sh.run("rpm -i " + distributionFile)); - Platforms.onDPKG(() -> sh.run("dpkg -i " + distributionFile)); - - return Installation.ofPackage(distribution.packaging); + if (Platforms.isRPM()) { + return sh.runIgnoreExitCode("rpm -i " + distributionFile); + } else { + return sh.runIgnoreExitCode("dpkg -i " + distributionFile); + } } public static void remove(Distribution distribution) { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml new file mode 100644 index 0000000000000..6f8ea9565e346 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml @@ -0,0 +1,74 @@ +setup: + - skip: + version: " - 6.3.99" + reason: weighted_avg is only available as of 6.4.0 + - do: + indices.create: + index: test_1 + body: + settings: + number_of_replicas: 0 + mappings: + doc: + properties: + int_field: + type : integer + double_field: + type : double + string_field: + type: keyword + + - do: + bulk: + refresh: true + body: + - 
index: + _index: test_1 + _type: doc + _id: 1 + - int_field: 1 + double_field: 1.0 + - index: + _index: test_1 + _type: doc + _id: 2 + - int_field: 2 + double_field: 2.0 + - index: + _index: test_1 + _type: doc + _id: 3 + - int_field: 3 + double_field: 3.0 + - index: + _index: test_1 + _type: doc + _id: 4 + - int_field: 4 + double_field: 4.0 + +--- +"Basic test": + + - do: + search: + body: + aggs: + the_int_avg: + weighted_avg: + value: + field: "int_field" + weight: + field: "int_field" + the_double_avg: + weighted_avg: + value: + field: "double_field" + weight: + field: "double_field" + + - match: { hits.total: 4 } + - length: { hits.hits: 4 } + - match: { aggregations.the_int_avg.value: 3.0 } + - match: { aggregations.the_double_avg.value: 3.0 } + diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 281ba9eaee820..4c75f0a1c19e0 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -174,6 +174,8 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_6_3_1 = new Version(V_6_3_1_ID, org.apache.lucene.util.Version.LUCENE_7_3_1); public static final int V_6_3_2_ID = 6030299; public static final Version V_6_3_2 = new Version(V_6_3_2_ID, org.apache.lucene.util.Version.LUCENE_7_3_1); + public static final int V_6_3_3_ID = 6030399; + public static final Version V_6_3_3 = new Version(V_6_3_3_ID, org.apache.lucene.util.Version.LUCENE_7_3_1); public static final int V_6_4_0_ID = 6040099; public static final Version V_6_4_0 = new Version(V_6_4_0_ID, org.apache.lucene.util.Version.LUCENE_7_4_0); public static final int V_7_0_0_alpha1_ID = 7000001; @@ -196,6 +198,8 @@ public static Version fromId(int id) { return V_7_0_0_alpha1; case V_6_4_0_ID: return V_6_4_0; + case V_6_3_3_ID: + return V_6_3_3; case V_6_3_2_ID: return V_6_3_2; case V_6_3_1_ID: diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java index 19b0517d96c95..173a44b67cdfc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java @@ -48,7 +48,6 @@ public class ClusterGetSettingsResponse extends ActionResponse implements ToXCon static final String TRANSIENT_FIELD = "transient"; static final String DEFAULTS_FIELD = "defaults"; - @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "cluster_get_settings_response", diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index c1b8c73c9ef0f..53aa522772aac 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -27,14 +27,17 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContentObject; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; @@ -45,7 +48,7 @@ /** * Restore snapshot request */ -public class RestoreSnapshotRequest extends MasterNodeRequest { +public class RestoreSnapshotRequest extends MasterNodeRequest implements ToXContentObject { private String snapshot; private String repository; @@ -563,6 +566,49 @@ public RestoreSnapshotRequest source(Map source) { return this; } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray("indices"); + for (String index : indices) { + builder.value(index); + } + builder.endArray(); + if (indicesOptions != null) { + indicesOptions.toXContent(builder, params); + } + if (renamePattern != null) { + builder.field("rename_pattern", renamePattern); + } + if (renameReplacement != null) { + builder.field("rename_replacement", renameReplacement); + } + builder.field("include_global_state", includeGlobalState); + builder.field("partial", partial); + builder.field("include_aliases", includeAliases); + if (settings != null) { + builder.startObject("settings"); + if (settings.isEmpty() == false) { + settings.toXContent(builder, params); + } + builder.endObject(); + } + if (indexSettings != null) { + builder.startObject("index_settings"); + if (indexSettings.isEmpty() == false) { + indexSettings.toXContent(builder, params); + } + builder.endObject(); + } + builder.startArray("ignore_index_settings"); + for (String ignoreIndexSetting : ignoreIndexSettings) { + builder.value(ignoreIndexSetting); + } + builder.endArray(); + builder.endObject(); + return builder; + } + @Override public void readFrom(StreamInput in) throws IOException { throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); @@ -573,4 +619,37 @@ public String getDescription() { return "snapshot [" + repository + ":" + snapshot + "]"; } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestoreSnapshotRequest that = (RestoreSnapshotRequest) o; + return waitForCompletion == that.waitForCompletion && + includeGlobalState == that.includeGlobalState && + partial == that.partial && + includeAliases == that.includeAliases && + Objects.equals(snapshot, that.snapshot) && + Objects.equals(repository, that.repository) && + Arrays.equals(indices, that.indices) && + Objects.equals(indicesOptions, that.indicesOptions) && + Objects.equals(renamePattern, that.renamePattern) && + Objects.equals(renameReplacement, that.renameReplacement) && + Objects.equals(settings, that.settings) && + Objects.equals(indexSettings, that.indexSettings) && + Arrays.equals(ignoreIndexSettings, that.ignoreIndexSettings); + } + + @Override + public int hashCode() { + int result = Objects.hash(snapshot, repository, indicesOptions, renamePattern, renameReplacement, waitForCompletion, + includeGlobalState, partial, includeAliases, settings, indexSettings); + result = 31 * result + Arrays.hashCode(indices); + result = 31 * result + Arrays.hashCode(ignoreIndexSettings); + return 
result; + } + + @Override + public String toString() { + return Strings.toString(this); + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponse.java index 5a02e4bcb1387..171509c018228 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponse.java @@ -21,15 +21,21 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.RestoreInfo; import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * Contains information about restores snapshot @@ -86,4 +92,42 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par builder.endObject(); return builder; } + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "restore_snapshot", true, v -> { + RestoreInfo restoreInfo = (RestoreInfo) v[0]; + Boolean accepted = (Boolean) v[1]; + assert (accepted == null && restoreInfo != null) || + (accepted != null && accepted && restoreInfo == null) : + "accepted: [" + accepted + "], restoreInfo: [" + restoreInfo + "]"; + return new RestoreSnapshotResponse(restoreInfo); + }); + + static { + PARSER.declareObject(optionalConstructorArg(), (parser, context) -> RestoreInfo.fromXContent(parser), new ParseField("snapshot")); + PARSER.declareBoolean(optionalConstructorArg(), new ParseField("accepted")); + } + + + public static RestoreSnapshotResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestoreSnapshotResponse that = (RestoreSnapshotResponse) o; + return Objects.equals(restoreInfo, that.restoreInfo); + } + + @Override + public int hashCode() { + return Objects.hash(restoreInfo); + } + + @Override + public String toString() { + return "RestoreSnapshotResponse{" + "restoreInfo=" + restoreInfo + '}'; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java index e330a0b8565fc..d0a62fe771d1f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java @@ -45,7 +45,6 @@ public class QueryExplanation implements Streamable, ToXContentFragment { public static final int RANDOM_SHARD = -1; - @SuppressWarnings("unchecked") static ConstructingObjectParser PARSER = new 
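The RestoreSnapshotRequest and RestoreSnapshotResponse changes above give the pair a full XContent round trip: the request now implements ToXContentObject for serialization, and the response gains a lenient ConstructingObjectParser with a fromXContent entry point for parsing. A minimal sketch of that parser idiom, assuming a hypothetical one-field response class (the parser API itself is the real Elasticsearch one):

```java
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

final class PointResponse {
    final String id;

    PointResponse(String id) {
        this.id = id;
    }

    // "true" makes the parser lenient: unknown fields are skipped rather than
    // rejected, the same choice made for RestoreSnapshotResponse above.
    static final ConstructingObjectParser<PointResponse, Void> PARSER =
        new ConstructingObjectParser<>("point_response", true, args -> new PointResponse((String) args[0]));

    static {
        PARSER.declareString(constructorArg(), new ParseField("id"));
    }

    static PointResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}
```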
ConstructingObjectParser<>( "query_explanation", true, diff --git a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java index 9ee59cf70d05e..455aab7f6e3d6 100644 --- a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java @@ -129,7 +129,6 @@ public String getSourceAsString() { /** * The source of the document (As a map). */ - @SuppressWarnings({"unchecked"}) public Map getSourceAsMap() throws ElasticsearchParseException { return getResult.sourceAsMap(); } diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index d15b7b92d62aa..e0a6cd827863a 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.get; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -90,9 +89,9 @@ protected MultiGetShardResponse shardOperation(MultiGetShardRequest request, Sha GetResult getResult = indexShard.getService().get(item.type(), item.id(), item.storedFields(), request.realtime(), item.version(), item.versionType(), item.fetchSourceContext()); response.add(request.locations.get(i), new GetResponse(getResult)); - } catch (Exception e) { + } catch (RuntimeException e) { if (TransportActions.isShardNotAvailableException(e)) { - throw (ElasticsearchException) e; + throw e; } else { logger.debug(() -> new ParameterizedMessage("{} failed to execute multi_get for [{}]/[{}]", shardId, item.type(), item.id()), e); diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java index 101ce7ec260e1..3f41aaddfb7cb 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java @@ -32,8 +32,8 @@ import java.io.IOException; -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; public class SimulateProcessorResult implements Writeable, ToXContentObject { @@ -42,7 +42,6 @@ public class SimulateProcessorResult implements Writeable, ToXContentObject { private final WriteableIngestDocument ingestDocument; private final Exception failure; - @SuppressWarnings("unchecked") private static final ConstructingObjectParser IGNORED_ERROR_PARSER = new ConstructingObjectParser<>( "ignored_error_parser", @@ -57,7 +56,6 @@ public class SimulateProcessorResult implements Writeable, ToXContentObject { ); } - @SuppressWarnings("unchecked") public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "simulate_processor_result", diff --git a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java 
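The TransportShardMultiGetAction hunk above (and the matching TransportShardMultiTermsVectorAction hunk below) narrows catch (Exception e) to catch (RuntimeException e), so the shard-not-available case can be rethrown as-is instead of going through the unchecked (ElasticsearchException) cast. A self-contained sketch of the same pattern, with hypothetical names:

```java
// Hypothetical analogue of the catch-clause tightening above: catching
// RuntimeException means the rethrow path needs no cast, while the old
// `catch (Exception e)` forced `throw (SomeException) e` to satisfy the compiler.
final class RethrowExample {
    // Stand-in predicate for TransportActions.isShardNotAvailableException(e).
    static boolean isFatal(RuntimeException e) {
        return e instanceof IllegalStateException;
    }

    static void run(Runnable op) {
        try {
            op.run();
        } catch (RuntimeException e) {
            if (isFatal(e)) {
                throw e; // rethrown unchanged: no cast needed
            }
            System.err.println("suppressed: " + e); // logged, then swallowed
        }
    }

    public static void main(String[] args) {
        run(() -> System.out.println("ok"));                     // completes normally
        run(() -> { throw new IllegalArgumentException("x"); }); // logged, not rethrown
    }
}
```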
b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java index 2430868bb5909..6331097024c69 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java @@ -94,7 +94,6 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment { ); } - @SuppressWarnings("unchecked") public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "writeable_ingest_document", diff --git a/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java b/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java index cdd895ff8cd2c..8598ab3e4be06 100644 --- a/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java +++ b/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.support; import com.carrotsearch.hppc.cursors.IntObjectCursor; + import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; @@ -205,7 +206,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - @SuppressWarnings("unchecked") ActiveShardCount that = (ActiveShardCount) o; + ActiveShardCount that = (ActiveShardCount) o; return value == that.value; } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java index 9dc7a899d033c..7b137fb418c2b 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java @@ -72,7 +72,6 @@ public RequestBuilder setWaitForActiveShards(ActiveShardCount waitForActiveShard * shard count is passed in, instead of having to first call {@link ActiveShardCount#from(int)} * to get the ActiveShardCount. 
*/ - @SuppressWarnings("unchecked") public RequestBuilder setWaitForActiveShards(final int waitForActiveShards) { return setWaitForActiveShards(ActiveShardCount.from(waitForActiveShards)); } diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java index b83ac3881fda5..f1641fdd25c98 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.termvectors; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -84,13 +83,13 @@ protected MultiTermVectorsShardResponse shardOperation(MultiTermVectorsShardRequ try { TermVectorsResponse termVectorsResponse = TermVectorsService.getTermVectors(indexShard, termVectorsRequest); response.add(request.locations.get(i), termVectorsResponse); - } catch (Exception t) { - if (TransportActions.isShardNotAvailableException(t)) { - throw (ElasticsearchException) t; + } catch (RuntimeException e) { + if (TransportActions.isShardNotAvailableException(e)) { + throw e; } else { - logger.debug(() -> new ParameterizedMessage("{} failed to execute multi term vectors for [{}]/[{}]", shardId, termVectorsRequest.type(), termVectorsRequest.id()), t); + logger.debug(() -> new ParameterizedMessage("{} failed to execute multi term vectors for [{}]/[{}]", shardId, termVectorsRequest.type(), termVectorsRequest.id()), e); response.add(request.locations.get(i), - new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), t)); + new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), e)); } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java index 138788251c90a..066f00c2cd1c7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState.Custom; import org.elasticsearch.common.collect.ImmutableOpenMap; @@ -165,7 +166,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - @SuppressWarnings("unchecked") Entry entry = (Entry) o; + Entry entry = (Entry) o; return snapshot.equals(entry.snapshot) && state == entry.state && indices.equals(entry.indices) && @@ -291,7 +292,7 @@ public boolean equals(Object o) { return false; } - @SuppressWarnings("unchecked") ShardRestoreStatus status = (ShardRestoreStatus) o; + ShardRestoreStatus status = (ShardRestoreStatus) o; return state == status.state && Objects.equals(nodeId, status.nodeId) && Objects.equals(reason, status.reason); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java index 
74789aada3a46..3bb9d42a5786d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java @@ -161,7 +161,6 @@ public void writeTo(final StreamOutput out) throws IOException { } @Override - @SuppressWarnings("unchecked") public Diff diff(final MetaData.Custom previous) { return new IndexGraveyardDiff((IndexGraveyard) previous, this); } @@ -321,7 +320,7 @@ public void writeTo(final StreamOutput out) throws IOException { @Override public IndexGraveyard apply(final MetaData.Custom previous) { - @SuppressWarnings("unchecked") final IndexGraveyard old = (IndexGraveyard) previous; + final IndexGraveyard old = (IndexGraveyard) previous; if (removedCount > old.tombstones.size()) { throw new IllegalStateException("IndexGraveyardDiff cannot remove [" + removedCount + "] entries from [" + old.tombstones.size() + "] tombstones."); @@ -416,7 +415,7 @@ public boolean equals(final Object other) { if (other == null || getClass() != other.getClass()) { return false; } - @SuppressWarnings("unchecked") Tombstone that = (Tombstone) other; + Tombstone that = (Tombstone) other; return index.equals(that.index) && deleteDateInMillis == that.deleteDateInMillis; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 903802050127e..18b89db72a391 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -23,6 +23,7 @@ import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.rollover.RolloverInfo; import org.elasticsearch.action.support.ActiveShardCount; @@ -685,7 +686,6 @@ public Custom read(StreamInput in, String key) throws IOException { return lookupPrototypeSafe(key).readFrom(in); } - @SuppressWarnings("unchecked") @Override public Diff readDiff(StreamInput in, String key) throws IOException { return lookupPrototypeSafe(key).readDiffFrom(in); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java index ae58d2885bb7b..d35a4baa1e680 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java @@ -381,7 +381,6 @@ public IndexTemplateMetaData build() { aliases.build(), customs.build()); } - @SuppressWarnings("unchecked") public static void toXContent(IndexTemplateMetaData indexTemplateMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(indexTemplateMetaData.name()); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 4333959973496..c3da63886140a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -22,6 +22,7 @@ import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import 
org.apache.logging.log4j.Logger; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.action.AliasesRequest; @@ -169,7 +170,6 @@ public interface Custom extends NamedDiffable, ToXContentFragment, Clust private final SortedMap aliasAndIndexLookup; - @SuppressWarnings("unchecked") MetaData(String clusterUUID, long version, Settings transientSettings, Settings persistentSettings, ImmutableOpenMap indices, ImmutableOpenMap templates, ImmutableOpenMap customs, String[] allIndices, String[] allOpenIndices, String[] allClosedIndices, @@ -1000,7 +1000,7 @@ public Builder indexGraveyard(final IndexGraveyard indexGraveyard) { } public IndexGraveyard indexGraveyard() { - @SuppressWarnings("unchecked") IndexGraveyard graveyard = (IndexGraveyard) getCustom(IndexGraveyard.TYPE); + IndexGraveyard graveyard = (IndexGraveyard) getCustom(IndexGraveyard.TYPE); return graveyard; } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java index ff7aab4a25622..13cb85ea399d5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java @@ -217,7 +217,7 @@ public boolean equals(Object o) { return false; } - @SuppressWarnings("unchecked") SnapshotRecoverySource that = (SnapshotRecoverySource) o; + SnapshotRecoverySource that = (SnapshotRecoverySource) o; return snapshot.equals(that.snapshot) && index.equals(that.index) && version.equals(that.version); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java index 850e8c9c14202..7ce971958c9d2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java @@ -175,7 +175,7 @@ public boolean equals(Object other) { if (other == null || other instanceof AbstractAllocationDecision == false) { return false; } - @SuppressWarnings("unchecked") AbstractAllocationDecision that = (AbstractAllocationDecision) other; + AbstractAllocationDecision that = (AbstractAllocationDecision) other; return Objects.equals(targetNode, that.targetNode) && Objects.equals(nodeDecisions, that.nodeDecisions); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java index fc2d81b38c493..c32d3e1518ded 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java @@ -316,7 +316,7 @@ public boolean equals(Object other) { if (other instanceof AllocateUnassignedDecision == false) { return false; } - @SuppressWarnings("unchecked") AllocateUnassignedDecision that = (AllocateUnassignedDecision) other; + AllocateUnassignedDecision that = (AllocateUnassignedDecision) other; return Objects.equals(allocationStatus, that.allocationStatus) && Objects.equals(allocationId, that.allocationId) && reuseStore == that.reuseStore diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java 
b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java index de9795ff4c253..9439187d7395c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java @@ -300,7 +300,7 @@ public boolean equals(Object other) { if (other instanceof MoveDecision == false) { return false; } - @SuppressWarnings("unchecked") MoveDecision that = (MoveDecision) other; + MoveDecision that = (MoveDecision) other; return Objects.equals(allocationDecision, that.allocationDecision) && Objects.equals(canRemainDecision, that.canRemainDecision) && Objects.equals(clusterRebalanceDecision, that.clusterRebalanceDecision) diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java index de14e0cd53db6..3c4b35d5c3477 100644 --- a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java +++ b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java @@ -54,7 +54,6 @@ public void removeLifecycleListener(LifecycleListener listener) { listeners.remove(listener); } - @SuppressWarnings({"unchecked"}) @Override public void start() { if (!lifecycle.canMoveToStarted()) { @@ -72,7 +71,6 @@ public void start() { protected abstract void doStart(); - @SuppressWarnings({"unchecked"}) @Override public void stop() { if (!lifecycle.canMoveToStopped()) { diff --git a/server/src/main/java/org/elasticsearch/common/inject/ConstructorInjectorStore.java b/server/src/main/java/org/elasticsearch/common/inject/ConstructorInjectorStore.java index ce63da62d8dfc..dfc216028c100 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/ConstructorInjectorStore.java +++ b/server/src/main/java/org/elasticsearch/common/inject/ConstructorInjectorStore.java @@ -32,7 +32,6 @@ class ConstructorInjectorStore { private final FailableCache, ConstructorInjector> cache = new FailableCache, ConstructorInjector>() { @Override - @SuppressWarnings("unchecked") protected ConstructorInjector create(TypeLiteral type, Errors errors) throws ErrorsException { return createConstructor(type, errors); diff --git a/server/src/main/java/org/elasticsearch/common/inject/TypeConverterBindingProcessor.java b/server/src/main/java/org/elasticsearch/common/inject/TypeConverterBindingProcessor.java index e42082817c1eb..e296386408518 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/TypeConverterBindingProcessor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/TypeConverterBindingProcessor.java @@ -101,7 +101,6 @@ public String toString() { }, new TypeConverter() { @Override - @SuppressWarnings("unchecked") public Object convert(String value, TypeLiteral toType) { try { return Class.forName(value); @@ -128,7 +127,6 @@ private void convertToPrimitiveType(Class primitiveType, final Class w TypeConverter typeConverter = new TypeConverter() { @Override - @SuppressWarnings("unchecked") public Object convert(String value, TypeLiteral toType) { try { return parser.invoke(null, value); diff --git a/server/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java b/server/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java index cb434a90369d3..d676b19dddb64 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java 
+++ b/server/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java @@ -42,7 +42,6 @@ class AssistedConstructor { private final ParameterListKey assistedParameters; private final List allParameters; - @SuppressWarnings("unchecked") AssistedConstructor(Constructor constructor, List> parameterTypes) { this.constructor = constructor; diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethod.java b/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethod.java index 0cfafc4a30ad5..349935ac7c484 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethod.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethod.java @@ -97,7 +97,7 @@ public T get() { try { // We know this cast is safe because T is the method's return type. - @SuppressWarnings({"unchecked", "UnnecessaryLocalVariable"}) + @SuppressWarnings({"unchecked"}) T result = (T) method.invoke(instance, parameters); return result; } catch (IllegalAccessException e) { diff --git a/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java b/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java index 5bc1595be5f5c..5447f2ca39962 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java @@ -220,7 +220,6 @@ private RealMultibinder(Binder binder, TypeLiteral elementType, } @Override - @SuppressWarnings("unchecked") public void configure(Binder binder) { checkConfiguration(!isInitialized(), "Multibinder was already initialized"); diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultBindingTargetVisitor.java b/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultBindingTargetVisitor.java index 75a3b615a1063..0e4f7a801314e 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultBindingTargetVisitor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultBindingTargetVisitor.java @@ -78,8 +78,7 @@ public V visit(ConvertedConstantBinding convertedConstantBinding) { // javac says it's an error to cast ProviderBinding to Binding @Override - @SuppressWarnings("unchecked") public V visit(ProviderBinding providerBinding) { - return visitOther((Binding) providerBinding); + return visitOther(providerBinding); } } diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index b11aa9d4a9693..fd9ffdfd31d16 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -59,6 +59,7 @@ import java.time.ZoneId; import java.time.ZonedDateTime; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.EnumSet; @@ -518,7 +519,6 @@ public Map readMap() throws IOException { return (Map) readGenericValue(); } - @SuppressWarnings({"unchecked"}) @Nullable public Object readGenericValue() throws IOException { byte type = readByte(); @@ -932,8 +932,23 @@ public List readStreamableList(Supplier constructor * Reads a list of objects */ public List readList(Writeable.Reader reader) throws IOException { + return readCollection(reader, ArrayList::new); + } + + /** + * Reads a set of objects + */ + public Set 
readSet(Writeable.Reader reader) throws IOException { + return readCollection(reader, HashSet::new); + } + + /** + * Reads a collection of objects + */ + private > C readCollection(Writeable.Reader reader, + IntFunction constructor) throws IOException { int count = readArraySize(); - List builder = new ArrayList<>(count); + C builder = constructor.apply(count); for (int i=0; i list) throws IOException { } } + /** + * Writes a collection of generic objects via a {@link Writer} + */ + public void writeCollection(Collection collection, Writer writer) throws IOException { + writeVInt(collection.size()); + for (T val: collection) { + writer.write(this, val); + } + } + /** * Writes a list of strings */ diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index 3a8a06949b29c..eee45743ee32e 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -166,6 +166,13 @@ private KeyStoreWrapper(int formatVersion, boolean hasPassword, byte[] dataBytes this.dataBytes = dataBytes; } + /** + * Get the metadata format version for the keystore + **/ + public int getFormatVersion() { + return formatVersion; + } + /** Returns a path representing the ES keystore in the given config dir. */ public static Path keystorePath(Path configDir) { return configDir.resolve(KEYSTORE_FILENAME); @@ -593,8 +600,10 @@ private void ensureOpen() { @Override public synchronized void close() { this.closed = true; - for (Entry entry : entries.get().values()) { - Arrays.fill(entry.bytes, (byte)0); + if (null != entries.get() && entries.get().isEmpty() == false) { + for (Entry entry : entries.get().values()) { + Arrays.fill(entry.bytes, (byte) 0); + } } } } diff --git a/server/src/main/java/org/elasticsearch/index/get/GetResult.java b/server/src/main/java/org/elasticsearch/index/get/GetResult.java index 021e97767d840..a3f83609037e1 100644 --- a/server/src/main/java/org/elasticsearch/index/get/GetResult.java +++ b/server/src/main/java/org/elasticsearch/index/get/GetResult.java @@ -178,7 +178,6 @@ public String sourceAsString() { /** * The source of the document (As a map). 
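The StreamInput change above folds readList and the new readSet into one readCollection helper parameterized by a size-aware constructor, and StreamOutput gains the symmetric writeCollection. A dependency-free sketch of the same idea over plain java.io streams (all names here are hypothetical):

```java
// Minimal stand-alone sketch of the readCollection/writeCollection pattern
// added above: the caller supplies a size-aware constructor, so one helper
// serves both List and Set.
import java.io.*;
import java.util.*;
import java.util.function.IntFunction;

final class CollectionCodec {
    interface Reader<T> { T read(DataInputStream in) throws IOException; }
    interface Writer<T> { void write(DataOutputStream out, T value) throws IOException; }

    static <T, C extends Collection<T>> C readCollection(
            DataInputStream in, Reader<T> reader, IntFunction<C> constructor) throws IOException {
        int count = in.readInt();          // length prefix, mirroring readArraySize above
        C builder = constructor.apply(count); // pre-sized collection
        for (int i = 0; i < count; i++) {
            builder.add(reader.read(in));
        }
        return builder;
    }

    static <T> List<T> readList(DataInputStream in, Reader<T> reader) throws IOException {
        return readCollection(in, reader, ArrayList::new);
    }

    static <T> Set<T> readSet(DataInputStream in, Reader<T> reader) throws IOException {
        return readCollection(in, reader, HashSet::new);
    }

    static <T> void writeCollection(DataOutputStream out, Collection<T> collection, Writer<T> writer)
            throws IOException {
        out.writeInt(collection.size()); // length prefix, mirroring writeVInt above
        for (T value : collection) {
            writer.write(out, value);
        }
    }
}
```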
*/ - @SuppressWarnings({"unchecked"}) public Map sourceAsMap() throws ElasticsearchParseException { if (source == null) { return null; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 467ce2a4c1203..663aa7e6f9e10 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -214,7 +214,6 @@ public RootObjectMapper root() { return mapping.root; } - @SuppressWarnings({"unchecked"}) public T metadataMapper(Class type) { return mapping.metadataMapper(type); } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 21982d429752b..b82c3781859d3 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -919,6 +919,7 @@ public StoreStats storeStats() { try { return store.stats(); } catch (IOException e) { + failShard("Failing shard because of exception during storeStats", e); throw new ElasticsearchException("io exception while building 'store stats'", e); } } diff --git a/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java b/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java index 87ea08dc74d2c..59523f3390167 100644 --- a/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java +++ b/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java @@ -45,7 +45,6 @@ public void onFailure(Exception e) { task.markAsFailed(e); } - @SuppressWarnings("unchecked") @Override protected void doRun() throws Exception { try { diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java index 9ed0af010b530..4cb8c722f2620 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java @@ -85,7 +85,6 @@ public void onFailure(String source, Exception e) { listener.onFailure(e); } - @SuppressWarnings("unchecked") @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { PersistentTasksCustomMetaData tasks = newState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java index 2ac57e074b7bf..a8f9c73ab3266 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java @@ -33,7 +33,6 @@ public class PersistentTasksExecutorRegistry extends AbstractComponent { private final Map> taskExecutors; - @SuppressWarnings("unchecked") public PersistentTasksExecutorRegistry(Settings settings, Collection> taskExecutors) { super(settings); Map> map = new HashMap<>(); diff --git a/server/src/main/java/org/elasticsearch/repositories/IndexId.java b/server/src/main/java/org/elasticsearch/repositories/IndexId.java index 469caa26b645c..2a3d9f15d1637 100644 --- 
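In the same defensive spirit, the KeyStoreWrapper.close() hunk a few files above now tolerates a null or empty entry map while still zeroing every secret byte before the wrapper is discarded. A plain-Java analogue (the class here is hypothetical):

```java
// Hypothetical analogue of the close() hardening above: guard the entry map,
// then overwrite each secret in place so it does not linger on the heap.
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

final class SecretStore implements AutoCloseable {
    private final Map<String, byte[]> entries = new HashMap<>();

    void put(String name, byte[] secret) {
        entries.put(name, secret);
    }

    @Override
    public void close() {
        // The null check mirrors the original, whose entries live behind a
        // lazily-set holder and may never have been populated.
        if (entries != null && entries.isEmpty() == false) {
            for (byte[] secret : entries.values()) {
                Arrays.fill(secret, (byte) 0); // zero, don't just drop the reference
            }
        }
    }
}
```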
a/server/src/main/java/org/elasticsearch/repositories/IndexId.java +++ b/server/src/main/java/org/elasticsearch/repositories/IndexId.java @@ -89,7 +89,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - @SuppressWarnings("unchecked") IndexId that = (IndexId) o; + IndexId that = (IndexId) o; return Objects.equals(name, that.name) && Objects.equals(id, that.id); } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index 7a8d8327d5e3a..a97cf4bb419a2 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -238,7 +238,7 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) { return false; } - @SuppressWarnings("unchecked") RepositoryData that = (RepositoryData) obj; + RepositoryData that = (RepositoryData) obj; return snapshotIds.equals(that.snapshotIds) && snapshotStates.equals(that.snapshotStates) && indices.equals(that.indices) diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index efef1aeb04f76..99b47cf83e2ac 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -181,6 +181,8 @@ import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCount; import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.weighted_avg.InternalWeightedAvg; +import org.elasticsearch.search.aggregations.metrics.weighted_avg.WeightedAvgAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; @@ -335,6 +337,8 @@ public ParseFieldRegistry getMovingAverageModel private void registerAggregations(List plugins) { registerAggregation(new AggregationSpec(AvgAggregationBuilder.NAME, AvgAggregationBuilder::new, AvgAggregationBuilder::parse) .addResultReader(InternalAvg::new)); + registerAggregation(new AggregationSpec(WeightedAvgAggregationBuilder.NAME, WeightedAvgAggregationBuilder::new, + WeightedAvgAggregationBuilder::parse).addResultReader(InternalWeightedAvg::new)); registerAggregation(new AggregationSpec(SumAggregationBuilder.NAME, SumAggregationBuilder::new, SumAggregationBuilder::parse) .addResultReader(InternalSum::new)); registerAggregation(new AggregationSpec(MinAggregationBuilder.NAME, MinAggregationBuilder::new, MinAggregationBuilder::parse) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java index 26d8bb1a1bdf5..b4e416f4d7789 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java @@ -82,6 +82,7 @@ import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount; import 
org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.weighted_avg.WeightedAvgAggregationBuilder; import java.util.Map; @@ -107,6 +108,13 @@ public static AvgAggregationBuilder avg(String name) { return new AvgAggregationBuilder(name); } + /** + * Create a new {@link WeightedAvg} aggregation with the given name. + */ + public static WeightedAvgAggregationBuilder weightedAvg(String name) { + return new WeightedAvgAggregationBuilder(name); + } + /** * Create a new {@link Max} aggregation with the given name. */ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 1428a31a8dedc..e93266db805dc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -247,7 +247,6 @@ static class InternalBucket extends InternalMultiBucketAggregation.InternalBucke this.formats = formats; } - @SuppressWarnings("unchecked") InternalBucket(StreamInput in, List sourceNames, List formats, int[] reverseMuls) throws IOException { this.key = new CompositeKey(in); this.docCount = in.readVLong(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java index d998beedf142d..c490b344bdbce 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -358,7 +357,6 @@ public static class Unmapped extends NonCollect private final InternalRange.Factory factory; private final DocValueFormat format; - @SuppressWarnings("unchecked") public Unmapped(String name, R[] ranges, boolean keyed, DocValueFormat format, SearchContext context, Aggregator parent, InternalRange.Factory factory, List pipelineAggregators, Map metaData) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/InternalWeightedAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/InternalWeightedAvg.java new file mode 100644 index 0000000000000..9ad1a1df78aec --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/InternalWeightedAvg.java @@ -0,0 +1,144 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
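For callers, the new AggregationBuilders.weightedAvg entry point pairs with MultiValuesSourceFieldConfig to name the value and weight inputs. A hedged usage sketch, assuming the MultiValuesSourceFieldConfig.Builder API introduced alongside this change (field names here are illustrative):

```java
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.metrics.weighted_avg.WeightedAvgAggregationBuilder;
import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig;

class WeightedAvgUsage {
    // Average "price" weighted by "quantity": sum(price * quantity) / sum(quantity).
    static WeightedAvgAggregationBuilder priceByQuantity() {
        return AggregationBuilders.weightedAvg("avg_price")
            .value(new MultiValuesSourceFieldConfig.Builder().setFieldName("price").build())
            .weight(new MultiValuesSourceFieldConfig.Builder().setFieldName("quantity").build());
    }
}
```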
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.aggregations.metrics.weighted_avg; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class InternalWeightedAvg extends InternalNumericMetricsAggregation.SingleValue implements WeightedAvg { + private final double sum; + private final double weight; + + public InternalWeightedAvg(String name, double sum, double weight, DocValueFormat format, List pipelineAggregators, + Map metaData) { + super(name, pipelineAggregators, metaData); + this.sum = sum; + this.weight = weight; + this.format = format; + } + + /** + * Read from a stream. + */ + public InternalWeightedAvg(StreamInput in) throws IOException { + super(in); + format = in.readNamedWriteable(DocValueFormat.class); + sum = in.readDouble(); + weight = in.readDouble(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeNamedWriteable(format); + out.writeDouble(sum); + out.writeDouble(weight); + } + + @Override + public double value() { + return getValue(); + } + + @Override + public double getValue() { + return sum / weight; + } + + double getSum() { + return sum; + } + + double getWeight() { + return weight; + } + + DocValueFormat getFormatter() { + return format; + } + + @Override + public String getWriteableName() { + return WeightedAvgAggregationBuilder.NAME; + } + + @Override + public InternalWeightedAvg doReduce(List aggregations, ReduceContext reduceContext) { + double weight = 0; + double sum = 0; + double sumCompensation = 0; + double weightCompensation = 0; + // Compute the sum of double values with Kahan summation algorithm which is more + // accurate than naive summation. + for (InternalAggregation aggregation : aggregations) { + InternalWeightedAvg avg = (InternalWeightedAvg) aggregation; + // If the weight is Inf or NaN, just add it to the running tally to "convert" to + // Inf/NaN. This keeps the behavior bwc from before kahan summing + if (Double.isFinite(avg.weight) == false) { + weight += avg.weight; + } else if (Double.isFinite(weight)) { + double corrected = avg.weight - weightCompensation; + double newWeight = weight + corrected; + weightCompensation = (newWeight - weight) - corrected; + weight = newWeight; + } + // If the avg is Inf or NaN, just add it to the running tally to "convert" to + // Inf/NaN. 
This keeps the behavior bwc from before kahan summing + if (Double.isFinite(avg.sum) == false) { + sum += avg.sum; + } else if (Double.isFinite(sum)) { + double corrected = avg.sum - sumCompensation; + double newSum = sum + corrected; + sumCompensation = (newSum - sum) - corrected; + sum = newSum; + } + } + return new InternalWeightedAvg(getName(), sum, weight, format, pipelineAggregators(), getMetaData()); + } + @Override + public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + builder.field(CommonFields.VALUE.getPreferredName(), weight != 0 ? getValue() : null); + if (weight != 0 && format != DocValueFormat.RAW) { + builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(getValue())); + } + return builder; + } + + @Override + protected int doHashCode() { + return Objects.hash(sum, weight, format.getWriteableName()); + } + + @Override + protected boolean doEquals(Object obj) { + InternalWeightedAvg other = (InternalWeightedAvg) obj; + return Objects.equals(sum, other.sum) && + Objects.equals(weight, other.weight) && + Objects.equals(format.getWriteableName(), other.format.getWriteableName()); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/ParsedWeightedAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/ParsedWeightedAvg.java new file mode 100644 index 0000000000000..dcda79ce33e92 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/ParsedWeightedAvg.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.metrics.weighted_avg; + +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation; + +import java.io.IOException; + +public class ParsedWeightedAvg extends ParsedSingleValueNumericMetricsAggregation implements WeightedAvg { + + @Override + public double getValue() { + return value(); + } + + @Override + public String getType() { + return WeightedAvgAggregationBuilder.NAME; + } + + @Override + protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + // InternalWeightedAvg renders value only if the avg normalizer (count) is not 0. + // We parse back `null` as Double.POSITIVE_INFINITY so we check for that value here to get the same xContent output + boolean hasValue = value != Double.POSITIVE_INFINITY; + builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? 
value : null); + if (hasValue && valueAsString != null) { + builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString); + } + return builder; + } + + private static final ObjectParser PARSER + = new ObjectParser<>(ParsedWeightedAvg.class.getSimpleName(), true, ParsedWeightedAvg::new); + + static { + declareSingleValueFields(PARSER, Double.POSITIVE_INFINITY); + } + + public static ParsedWeightedAvg fromXContent(XContentParser parser, final String name) { + ParsedWeightedAvg avg = PARSER.apply(parser, null); + avg.setName(name); + return avg; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvg.java new file mode 100644 index 0000000000000..7af48f677c1f6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvg.java @@ -0,0 +1,32 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.aggregations.metrics.weighted_avg; + +import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; + +/** + * An aggregation that computes the weighted average of the values in the current bucket. + */ +public interface WeightedAvg extends NumericMetricsAggregation.SingleValue { + + /** + * The weighted average value. + */ + double getValue(); +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregationBuilder.java new file mode 100644 index 0000000000000..be06f792a5e89 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregationBuilder.java @@ -0,0 +1,128 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.search.aggregations.metrics.weighted_avg; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.support.MultiValuesSourceAggregationBuilder; +import org.elasticsearch.search.aggregations.support.MultiValuesSourceAggregatorFactory; +import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig; +import org.elasticsearch.search.aggregations.support.MultiValuesSourceParseHelper; +import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; +import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +public class WeightedAvgAggregationBuilder extends MultiValuesSourceAggregationBuilder.LeafOnly { + public static final String NAME = "weighted_avg"; + public static final ParseField VALUE_FIELD = new ParseField("value"); + public static final ParseField WEIGHT_FIELD = new ParseField("weight"); + + private static final ObjectParser PARSER; + static { + PARSER = new ObjectParser<>(WeightedAvgAggregationBuilder.NAME); + MultiValuesSourceParseHelper.declareCommon(PARSER, true, ValueType.NUMERIC); + MultiValuesSourceParseHelper.declareField(VALUE_FIELD.getPreferredName(), PARSER, true, false); + MultiValuesSourceParseHelper.declareField(WEIGHT_FIELD.getPreferredName(), PARSER, true, false); + } + + public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException { + return PARSER.parse(parser, new WeightedAvgAggregationBuilder(aggregationName), null); + } + + public WeightedAvgAggregationBuilder(String name) { + super(name, ValueType.NUMERIC); + } + + public WeightedAvgAggregationBuilder(WeightedAvgAggregationBuilder clone, Builder factoriesBuilder, Map metaData) { + super(clone, factoriesBuilder, metaData); + } + + public WeightedAvgAggregationBuilder value(MultiValuesSourceFieldConfig valueConfig) { + valueConfig = Objects.requireNonNull(valueConfig, "Configuration for field [" + VALUE_FIELD + "] cannot be null"); + field(VALUE_FIELD.getPreferredName(), valueConfig); + return this; + } + + public WeightedAvgAggregationBuilder weight(MultiValuesSourceFieldConfig weightConfig) { + weightConfig = Objects.requireNonNull(weightConfig, "Configuration for field [" + WEIGHT_FIELD + "] cannot be null"); + field(WEIGHT_FIELD.getPreferredName(), weightConfig); + return this; + } + + /** + * Read from a stream. 
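Note that WeightedAvgAggregationBuilder's PARSER above is an ObjectParser, which mutates an existing target object field by field, unlike the ConstructingObjectParser used for RestoreSnapshotResponse earlier, which collects constructor arguments first. A minimal sketch of the ObjectParser idiom with a hypothetical target class (the parser API is the real one):

```java
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

final class BoxBuilder {
    private String label;

    void setLabel(String label) {
        this.label = label;
    }

    static final ObjectParser<BoxBuilder, Void> PARSER = new ObjectParser<>("box");

    static {
        PARSER.declareString(BoxBuilder::setLabel, new ParseField("label"));
    }

    static BoxBuilder parse(XContentParser parser) throws IOException {
        // The target instance is supplied up front and mutated field by field,
        // mirroring WeightedAvgAggregationBuilder.parse above.
        return PARSER.parse(parser, new BoxBuilder(), null);
    }
}
```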
+ */ + public WeightedAvgAggregationBuilder(StreamInput in) throws IOException { + super(in, ValueType.NUMERIC); + } + + @Override + protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map metaData) { + return new WeightedAvgAggregationBuilder(this, factoriesBuilder, metaData); + } + + @Override + protected void innerWriteTo(StreamOutput out) { + // Do nothing, no extra state to write to stream + } + + @Override + protected MultiValuesSourceAggregatorFactory innerBuild(SearchContext context, + Map> configs, + DocValueFormat format, + AggregatorFactory parent, + Builder subFactoriesBuilder) throws IOException { + return new WeightedAvgAggregatorFactory(name, configs, format, context, parent, subFactoriesBuilder, metaData); + } + + @Override + public XContentBuilder doXContentBody(XContentBuilder builder, ToXContent.Params params) throws IOException { + return builder; + } + + @Override + protected int innerHashCode() { + return 0; + } + + @Override + protected boolean innerEquals(Object obj) { + return true; + } + + @Override + public String getType() { + return NAME; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregator.java new file mode 100644 index 0000000000000..7a34fe6df4a68 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregator.java @@ -0,0 +1,158 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.search.aggregations.metrics.weighted_avg; + +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; +import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.support.MultiValuesSource; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.search.aggregations.metrics.weighted_avg.WeightedAvgAggregationBuilder.VALUE_FIELD; +import static org.elasticsearch.search.aggregations.metrics.weighted_avg.WeightedAvgAggregationBuilder.WEIGHT_FIELD; + +public class WeightedAvgAggregator extends NumericMetricsAggregator.SingleValue { + + private final MultiValuesSource.NumericMultiValuesSource valuesSources; + + private DoubleArray weights; + private DoubleArray sums; + private DoubleArray sumCompensations; + private DoubleArray weightCompensations; + private DocValueFormat format; + + public WeightedAvgAggregator(String name, MultiValuesSource.NumericMultiValuesSource valuesSources, DocValueFormat format, + SearchContext context, Aggregator parent, List pipelineAggregators, + Map metaData) throws IOException { + super(name, context, parent, pipelineAggregators, metaData); + this.valuesSources = valuesSources; + this.format = format; + if (valuesSources != null) { + final BigArrays bigArrays = context.bigArrays(); + weights = bigArrays.newDoubleArray(1, true); + sums = bigArrays.newDoubleArray(1, true); + sumCompensations = bigArrays.newDoubleArray(1, true); + weightCompensations = bigArrays.newDoubleArray(1, true); + } + } + + @Override + public boolean needsScores() { + return valuesSources != null && valuesSources.needsScores(); + } + + @Override + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, + final LeafBucketCollector sub) throws IOException { + if (valuesSources == null) { + return LeafBucketCollector.NO_OP_COLLECTOR; + } + final BigArrays bigArrays = context.bigArrays(); + final SortedNumericDoubleValues docValues = valuesSources.getField(VALUE_FIELD.getPreferredName(), ctx); + final SortedNumericDoubleValues docWeights = valuesSources.getField(WEIGHT_FIELD.getPreferredName(), ctx); + + return new LeafBucketCollectorBase(sub, docValues) { + @Override + public void collect(int doc, long bucket) throws IOException { + weights = bigArrays.grow(weights, bucket + 1); + sums = bigArrays.grow(sums, bucket + 1); + sumCompensations = bigArrays.grow(sumCompensations, bucket + 1); + weightCompensations = bigArrays.grow(weightCompensations, bucket + 1); + + if (docValues.advanceExact(doc) && docWeights.advanceExact(doc)) { + if (docWeights.docValueCount() > 1) { + throw new AggregationExecutionException("Encountered more than one weight for a " + + "single document. 
Use a script to combine multiple weights-per-doc into a single value."); + } + // There should always be one weight if advanceExact lands us here, either + // a real weight or a `missing` weight + assert docWeights.docValueCount() == 1; + final double weight = docWeights.nextValue(); + + final int numValues = docValues.docValueCount(); + assert numValues > 0; + + for (int i = 0; i < numValues; i++) { + kahanSum(docValues.nextValue() * weight, sums, sumCompensations, bucket); + kahanSum(weight, weights, weightCompensations, bucket); + } + } + } + }; + } + + private static void kahanSum(double value, DoubleArray values, DoubleArray compensations, long bucket) { + // Compute the sum of double values with Kahan summation algorithm which is more + // accurate than naive summation. + double sum = values.get(bucket); + double compensation = compensations.get(bucket); + + if (Double.isFinite(value) == false) { + sum += value; + } else if (Double.isFinite(sum)) { + double corrected = value - compensation; + double newSum = sum + corrected; + compensation = (newSum - sum) - corrected; + sum = newSum; + } + values.set(bucket, sum); + compensations.set(bucket, compensation); + } + + @Override + public double metric(long owningBucketOrd) { + if (valuesSources == null || owningBucketOrd >= sums.size()) { + return Double.NaN; + } + return sums.get(owningBucketOrd) / weights.get(owningBucketOrd); + } + + @Override + public InternalAggregation buildAggregation(long bucket) { + if (valuesSources == null || bucket >= sums.size()) { + return buildEmptyAggregation(); + } + return new InternalWeightedAvg(name, sums.get(bucket), weights.get(bucket), format, pipelineAggregators(), metaData()); + } + + @Override + public InternalAggregation buildEmptyAggregation() { + return new InternalWeightedAvg(name, 0.0, 0L, format, pipelineAggregators(), metaData()); + } + + @Override + public void doClose() { + Releasables.close(weights, sums, sumCompensations, weightCompensations); + } + +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregatorFactory.java new file mode 100644 index 0000000000000..c7aab73af2867 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregatorFactory.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
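The `kahanSum` helper above is standard compensated summation: the aggregator accumulates Σ(value·weight) and Σ(weight) per bucket, and `metric()` divides the two. A self-contained sketch of why the compensation term matters when a large running sum would otherwise swallow small increments:

```java
// No ES types needed: the same compensated-summation steps as kahanSum above.
public class KahanSummationDemo {
    public static void main(String[] args) {
        double naive = 0.0;
        double sum = 0.0;
        double compensation = 0.0;
        for (int i = 0; i <= 1_000_000; i++) {
            double value = (i == 0) ? 1e16 : 1.0;  // one huge value, then many tiny ones
            naive += value;                        // every 1.0 is rounded away at this magnitude
            double corrected = value - compensation;
            double newSum = sum + corrected;
            compensation = (newSum - sum) - corrected; // running rounding error
            sum = newSum;
        }
        System.out.println("naive summation: " + naive); // still 1.0E16, the 1.0s are lost
        System.out.println("kahan summation: " + sum);   // ~1.0000000001E16, as expected
    }
}
```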
+ */ + +package org.elasticsearch.search.aggregations.metrics.weighted_avg; + +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.support.MultiValuesSource; +import org.elasticsearch.search.aggregations.support.MultiValuesSourceAggregatorFactory; +import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; +import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public class WeightedAvgAggregatorFactory extends MultiValuesSourceAggregatorFactory { + + public WeightedAvgAggregatorFactory(String name, Map> configs, + DocValueFormat format, SearchContext context, AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metaData) throws IOException { + super(name, configs, format, context, parent, subFactoriesBuilder, metaData); + } + + @Override + protected Aggregator createUnmapped(Aggregator parent, List pipelineAggregators, Map metaData) + throws IOException { + return new WeightedAvgAggregator(name, null, format, context, parent, pipelineAggregators, metaData); + } + + @Override + protected Aggregator doCreateInternal(Map> configs, DocValueFormat format, + Aggregator parent, boolean collectsFromSingleBucket, + List pipelineAggregators, + Map metaData) throws IOException { + MultiValuesSource.NumericMultiValuesSource numericMultiVS + = new MultiValuesSource.NumericMultiValuesSource(configs, context.getQueryShardContext()); + if (numericMultiVS.areValuesSourcesEmpty()) { + return createUnmapped(parent, pipelineAggregators, metaData); + } + return new WeightedAvgAggregator(name, numericMultiVS, format, context, parent, pipelineAggregators, metaData); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java index d49da4658ae2d..185e1c63b98b8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java @@ -62,7 +62,6 @@ public class MovFnPipelineAggregationBuilder extends AbstractPipelineAggregation private static final Function> PARSER = name -> { - @SuppressWarnings("unchecked") ConstructingObjectParser parser = new ConstructingObjectParser<>( MovFnPipelineAggregationBuilder.NAME, false, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSource.java new file mode 100644 index 0000000000000..9ceecd75deaf0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSource.java @@ -0,0 +1,93 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
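The factory above falls back to `createUnmapped(...)` when every configured field resolved to nothing (checked via `areValuesSourcesEmpty()` on `MultiValuesSource`, added below); the resulting aggregator reports an empty result and a `NaN` metric instead of failing the search. A toy model of that branch, with illustrative names only:

```java
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

// Illustrative names only; models the createUnmapped(...) branch, not the ES API.
public class UnmappedFallbackSketch {
    static double metric(List<Double> resolvedSources, double weightedSum, double weightSum) {
        if (resolvedSources.stream().allMatch(Objects::isNull)) {
            return Double.NaN;            // unmapped path: empty aggregation, NaN metric
        }
        return weightedSum / weightSum;   // normal WeightedAvgAggregator path
    }

    public static void main(String[] args) {
        System.out.println(metric(Arrays.asList(null, null), 0, 0)); // NaN
        System.out.println(metric(Arrays.asList(1.0, 2.0), 10, 4));  // 2.5
    }
}
```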
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.aggregations.support; + +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.index.query.QueryShardContext; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * Class to encapsulate a set of ValuesSource objects labeled by field name + */ +public abstract class MultiValuesSource<VS extends ValuesSource> { + protected Map<String, VS> values; + + public static class NumericMultiValuesSource extends MultiValuesSource<ValuesSource.Numeric> { + public NumericMultiValuesSource(Map<String, ValuesSourceConfig<ValuesSource.Numeric>> valuesSourceConfigs, + QueryShardContext context) throws IOException { + values = new HashMap<>(valuesSourceConfigs.size()); + for (Map.Entry<String, ValuesSourceConfig<ValuesSource.Numeric>> entry : valuesSourceConfigs.entrySet()) { + values.put(entry.getKey(), entry.getValue().toValuesSource(context)); + } + } + + public SortedNumericDoubleValues getField(String fieldName, LeafReaderContext ctx) throws IOException { + ValuesSource.Numeric value = values.get(fieldName); + if (value == null) { + throw new IllegalArgumentException("Could not find field name [" + fieldName + "] in multiValuesSource"); + } + return value.doubleValues(ctx); + } + } + + public static class BytesMultiValuesSource extends MultiValuesSource<ValuesSource.Bytes> { + public BytesMultiValuesSource(Map<String, ValuesSourceConfig<ValuesSource.Bytes>> valuesSourceConfigs, + QueryShardContext context) throws IOException { + values = new HashMap<>(valuesSourceConfigs.size()); + for (Map.Entry<String, ValuesSourceConfig<ValuesSource.Bytes>> entry : valuesSourceConfigs.entrySet()) { + values.put(entry.getKey(), entry.getValue().toValuesSource(context)); + } + } + + public Object getField(String fieldName, LeafReaderContext ctx) throws IOException { + ValuesSource.Bytes value = values.get(fieldName); + if (value == null) { + throw new IllegalArgumentException("Could not find field name [" + fieldName + "] in multiValuesSource"); + } + return value.bytesValues(ctx); + } + } + + public static class GeoPointValuesSource extends MultiValuesSource<ValuesSource.GeoPoint> { + public GeoPointValuesSource(Map<String, ValuesSourceConfig<ValuesSource.GeoPoint>> valuesSourceConfigs, + QueryShardContext context) throws IOException { + values = new HashMap<>(valuesSourceConfigs.size()); + for (Map.Entry<String, ValuesSourceConfig<ValuesSource.GeoPoint>> entry : valuesSourceConfigs.entrySet()) { + values.put(entry.getKey(), entry.getValue().toValuesSource(context)); + } + } + } + + public boolean needsScores() { + return values.values().stream().anyMatch(ValuesSource::needsScores); + } + + public String[] fieldNames() { + return values.keySet().toArray(new String[0]); + } + + public boolean areValuesSourcesEmpty() { + return values.values().stream().allMatch(Objects::isNull); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java new file mode 100644 index 0000000000000..fee685346ec98 --- /dev/null +++
b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java @@ -0,0 +1,268 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.aggregations.support; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; +import org.elasticsearch.search.aggregations.AggregationInitializationException; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * Similar to {@link ValuesSourceAggregationBuilder}, except it references multiple ValuesSources (e.g. so that an aggregation + * can pull values from multiple fields). + * + * A limitation of this class is that all the ValuesSources being referenced must be of the same type. + */ +public abstract class MultiValuesSourceAggregationBuilder<VS extends ValuesSource, AB extends MultiValuesSourceAggregationBuilder<VS, AB>> + extends AbstractAggregationBuilder<AB> { + + + public abstract static class LeafOnly<VS extends ValuesSource, AB extends LeafOnly<VS, AB>> + extends MultiValuesSourceAggregationBuilder<VS, AB> { + + protected LeafOnly(String name, ValueType targetValueType) { + super(name, targetValueType); + } + + protected LeafOnly(LeafOnly<VS, AB> clone, Builder factoriesBuilder, Map<String, Object> metaData) { + super(clone, factoriesBuilder, metaData); + if (factoriesBuilder.count() > 0) { + throw new AggregationInitializationException("Aggregator [" + name + "] of type [" + + getType() + "] cannot accept sub-aggregations"); + } + } + + /** + * Read from a stream that does not serialize its targetValueType. This should be used by most subclasses.
+ */ + protected LeafOnly(StreamInput in, ValueType targetValueType) throws IOException { + super(in, targetValueType); + } + + @Override + public AB subAggregations(Builder subFactories) { + throw new AggregationInitializationException("Aggregator [" + name + "] of type [" + + getType() + "] cannot accept sub-aggregations"); + } + } + + + + private Map fields = new HashMap<>(); + private final ValueType targetValueType; + private ValueType valueType = null; + private String format = null; + + protected MultiValuesSourceAggregationBuilder(String name, ValueType targetValueType) { + super(name); + this.targetValueType = targetValueType; + } + + protected MultiValuesSourceAggregationBuilder(MultiValuesSourceAggregationBuilder clone, + Builder factoriesBuilder, Map metaData) { + super(clone, factoriesBuilder, metaData); + + this.fields = new HashMap<>(clone.fields); + this.targetValueType = clone.targetValueType; + this.valueType = clone.valueType; + this.format = clone.format; + } + + protected MultiValuesSourceAggregationBuilder(StreamInput in, ValueType targetValueType) + throws IOException { + super(in); + assert false == serializeTargetValueType() : "Wrong read constructor called for subclass that provides its targetValueType"; + this.targetValueType = targetValueType; + read(in); + } + + /** + * Read from a stream. + */ + @SuppressWarnings("unchecked") + private void read(StreamInput in) throws IOException { + fields = in.readMap(StreamInput::readString, MultiValuesSourceFieldConfig::new); + valueType = in.readOptionalWriteable(ValueType::readFromStream); + format = in.readOptionalString(); + } + + @Override + protected final void doWriteTo(StreamOutput out) throws IOException { + if (serializeTargetValueType()) { + out.writeOptionalWriteable(targetValueType); + } + out.writeMap(fields, StreamOutput::writeString, (o, value) -> value.writeTo(o)); + out.writeOptionalWriteable(valueType); + out.writeOptionalString(format); + innerWriteTo(out); + } + + /** + * Write subclass' state to the stream + */ + protected abstract void innerWriteTo(StreamOutput out) throws IOException; + + @SuppressWarnings("unchecked") + protected AB field(String propertyName, MultiValuesSourceFieldConfig config) { + if (config == null) { + throw new IllegalArgumentException("[config] must not be null: [" + name + "]"); + } + this.fields.put(propertyName, config); + return (AB) this; + } + + public Map fields() { + return fields; + } + + /** + * Sets the {@link ValueType} for the value produced by this aggregation + */ + @SuppressWarnings("unchecked") + public AB valueType(ValueType valueType) { + if (valueType == null) { + throw new IllegalArgumentException("[valueType] must not be null: [" + name + "]"); + } + this.valueType = valueType; + return (AB) this; + } + + /** + * Gets the {@link ValueType} for the value produced by this aggregation + */ + public ValueType valueType() { + return valueType; + } + + /** + * Sets the format to use for the output of the aggregation. + */ + @SuppressWarnings("unchecked") + public AB format(String format) { + if (format == null) { + throw new IllegalArgumentException("[format] must not be null: [" + name + "]"); + } + this.format = format; + return (AB) this; + } + + /** + * Gets the format to use for the output of the aggregation. 
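The `(AB) this` casts in the setters above rely on the self-referential type parameter `AB extends MultiValuesSourceAggregationBuilder<VS, AB>`: setters declared once on the abstract base still return the concrete subclass, so fluent call chains keep their type. A minimal sketch of the pattern with illustrative names:

```java
// Illustrative names; the point is the self-referential bound and the (AB) this cast.
public class SelfTypedBuilderSketch {
    abstract static class Base<AB extends Base<AB>> {
        String format;

        @SuppressWarnings("unchecked")
        AB format(String format) {
            this.format = format;
            return (AB) this; // safe as long as every subclass X declares X extends Base<X>
        }
    }

    static final class Weighted extends Base<Weighted> {
        String weightField;

        Weighted weightField(String field) {
            this.weightField = field;
            return this;
        }
    }

    public static void main(String[] args) {
        // format(...) already returns Weighted, so the chain keeps the subclass type:
        Weighted w = new Weighted().format("0.0").weightField("weight");
        System.out.println(w.format + " / " + w.weightField);
    }
}
```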
+ */ + public String format() { + return format; + } + + @Override + protected final MultiValuesSourceAggregatorFactory doBuild(SearchContext context, AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder) throws IOException { + ValueType finalValueType = this.valueType != null ? this.valueType : targetValueType; + + Map> configs = new HashMap<>(fields.size()); + fields.forEach((key, value) -> { + ValuesSourceConfig config = ValuesSourceConfig.resolve(context.getQueryShardContext(), finalValueType, + value.getFieldName(), value.getScript(), value.getMissing(), value.getTimeZone(), format); + configs.put(key, config); + }); + DocValueFormat docValueFormat = resolveFormat(format, finalValueType); + return innerBuild(context, configs, docValueFormat, parent, subFactoriesBuilder); + } + + + private static DocValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType) { + if (valueType == null) { + return DocValueFormat.RAW; // we can't figure it out + } + DocValueFormat valueFormat = valueType.defaultFormat; + if (valueFormat instanceof DocValueFormat.Decimal && format != null) { + valueFormat = new DocValueFormat.Decimal(format); + } + return valueFormat; + } + + protected abstract MultiValuesSourceAggregatorFactory innerBuild(SearchContext context, + Map> configs, DocValueFormat format, AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder) throws IOException; + + + /** + * Should this builder serialize its targetValueType? Defaults to false. All subclasses that override this to true + * should use the three argument read constructor rather than the four argument version. + */ + protected boolean serializeTargetValueType() { + return false; + } + + @Override + public final XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (fields != null) { + builder.field(CommonFields.FIELDS.getPreferredName(), fields); + } + if (format != null) { + builder.field(CommonFields.FORMAT.getPreferredName(), format); + } + if (valueType != null) { + builder.field(CommonFields.VALUE_TYPE.getPreferredName(), valueType.getPreferredName()); + } + doXContentBody(builder, params); + builder.endObject(); + return builder; + } + + protected abstract XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException; + + @Override + protected final int doHashCode() { + return Objects.hash(fields, format, targetValueType, valueType, innerHashCode()); + } + + protected abstract int innerHashCode(); + + @Override + protected final boolean doEquals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + MultiValuesSourceAggregationBuilder that = (MultiValuesSourceAggregationBuilder) other; + + return Objects.equals(this.fields, that.fields) + && Objects.equals(this.format, that.format) + && Objects.equals(this.valueType, that.valueType); + } + + protected abstract boolean innerEquals(Object obj); +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregatorFactory.java new file mode 100644 index 0000000000000..5de8fbd7561dc --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregatorFactory.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor 
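`resolveFormat` above only customizes output when the value type formats decimals and an explicit pattern was given; otherwise it falls back to `DocValueFormat.RAW`. To the best of my understanding `DocValueFormat.Decimal` is backed by a `java.text.DecimalFormat`-style pattern, so the effect of the `format` parameter can be previewed with the JDK class directly:

```java
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Locale;

public class DecimalPatternDemo {
    public static void main(String[] args) {
        DecimalFormat df = new DecimalFormat("#,##0.00", DecimalFormatSymbols.getInstance(Locale.ROOT));
        System.out.println(df.format(12345.678)); // 12,345.68
        // With no explicit pattern (or no usable value type), resolveFormat(...)
        // above returns DocValueFormat.RAW, i.e. the unformatted number.
    }
}
```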
+ * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.support; + +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public abstract class MultiValuesSourceAggregatorFactory> + extends AggregatorFactory { + + protected final Map> configs; + protected final DocValueFormat format; + + public MultiValuesSourceAggregatorFactory(String name, Map> configs, + DocValueFormat format, SearchContext context, + AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, + Map metaData) throws IOException { + super(name, context, parent, subFactoriesBuilder, metaData); + this.configs = configs; + this.format = format; + } + + @Override + public Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBucket, List pipelineAggregators, + Map metaData) throws IOException { + + return doCreateInternal(configs, format, parent, collectsFromSingleBucket, + pipelineAggregators, metaData); + } + + protected abstract Aggregator createUnmapped(Aggregator parent, List pipelineAggregators, + Map metaData) throws IOException; + + protected abstract Aggregator doCreateInternal(Map> configs, + DocValueFormat format, Aggregator parent, boolean collectsFromSingleBucket, + List pipelineAggregators, + Map metaData) throws IOException; + +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java new file mode 100644 index 0000000000000..56ceae69ff78e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java @@ -0,0 +1,186 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.support; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.script.Script; +import org.joda.time.DateTimeZone; + +import java.io.IOException; +import java.util.function.BiFunction; + +public class MultiValuesSourceFieldConfig implements Writeable, ToXContentFragment { + private String fieldName; + private Object missing; + private Script script; + private DateTimeZone timeZone; + + private static final String NAME = "field_config"; + + public static final BiFunction> PARSER + = (scriptable, timezoneAware) -> { + + ObjectParser parser + = new ObjectParser<>(MultiValuesSourceFieldConfig.NAME, MultiValuesSourceFieldConfig.Builder::new); + + parser.declareString(MultiValuesSourceFieldConfig.Builder::setFieldName, ParseField.CommonFields.FIELD); + parser.declareField(MultiValuesSourceFieldConfig.Builder::setMissing, XContentParser::objectText, + ParseField.CommonFields.MISSING, ObjectParser.ValueType.VALUE); + + if (scriptable) { + parser.declareField(MultiValuesSourceFieldConfig.Builder::setScript, + (p, context) -> Script.parse(p), + Script.SCRIPT_PARSE_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING); + } + + if (timezoneAware) { + parser.declareField(MultiValuesSourceFieldConfig.Builder::setTimeZone, p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return DateTimeZone.forID(p.text()); + } else { + return DateTimeZone.forOffsetHours(p.intValue()); + } + }, ParseField.CommonFields.TIME_ZONE, ObjectParser.ValueType.LONG); + } + return parser; + }; + + private MultiValuesSourceFieldConfig(String fieldName, Object missing, Script script, DateTimeZone timeZone) { + this.fieldName = fieldName; + this.missing = missing; + this.script = script; + this.timeZone = timeZone; + } + + public MultiValuesSourceFieldConfig(StreamInput in) throws IOException { + this.fieldName = in.readString(); + this.missing = in.readGenericValue(); + this.script = in.readOptionalWriteable(Script::new); + this.timeZone = in.readOptionalTimeZone(); + } + + public Object getMissing() { + return missing; + } + + public Script getScript() { + return script; + } + + public DateTimeZone getTimeZone() { + return timeZone; + } + + public String getFieldName() { + return fieldName; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeGenericValue(missing); + out.writeOptionalWriteable(script); + out.writeOptionalTimeZone(timeZone); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (missing != null) { + builder.field(ParseField.CommonFields.MISSING.getPreferredName(), missing); + } + if (script != null) { + builder.field(Script.SCRIPT_PARSE_FIELD.getPreferredName(), script); + } + if (fieldName != null) { + builder.field(ParseField.CommonFields.FIELD.getPreferredName(), fieldName); + } + if (timeZone != null) { + 
builder.field(ParseField.CommonFields.TIME_ZONE.getPreferredName(), timeZone); + } + return builder; + } + + public static class Builder { + private String fieldName; + private Object missing = null; + private Script script = null; + private DateTimeZone timeZone = null; + + public String getFieldName() { + return fieldName; + } + + public Builder setFieldName(String fieldName) { + this.fieldName = fieldName; + return this; + } + + public Object getMissing() { + return missing; + } + + public Builder setMissing(Object missing) { + this.missing = missing; + return this; + } + + public Script getScript() { + return script; + } + + public Builder setScript(Script script) { + this.script = script; + return this; + } + + public DateTimeZone getTimeZone() { + return timeZone; + } + + public Builder setTimeZone(DateTimeZone timeZone) { + this.timeZone = timeZone; + return this; + } + + public MultiValuesSourceFieldConfig build() { + if (Strings.isNullOrEmpty(fieldName) && script == null) { + throw new IllegalArgumentException("[" + ParseField.CommonFields.FIELD.getPreferredName() + + "] and [" + Script.SCRIPT_PARSE_FIELD.getPreferredName() + "] cannot both be null. " + + "Please specify one or the other."); + } + + if (Strings.isNullOrEmpty(fieldName) == false && script != null) { + throw new IllegalArgumentException("[" + ParseField.CommonFields.FIELD.getPreferredName() + + "] and [" + Script.SCRIPT_PARSE_FIELD.getPreferredName() + "] cannot both be configured. " + + "Please specify one or the other."); + } + + return new MultiValuesSourceFieldConfig(fieldName, missing, script, timeZone); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java new file mode 100644 index 0000000000000..4888495f9d8da --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParseHelper.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
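The `build()` method above enforces that exactly one of `field` and `script` is configured. A short sketch exercising that validation (the single-String `Script` constructor, an inline script with default options, is assumed here):

```java
// The single-String Script constructor is assumed (inline script, default options).
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig;

public class FieldConfigValidationSketch {
    public static void main(String[] args) {
        // valid: a field, optionally with a "missing" substitute
        MultiValuesSourceFieldConfig byField = new MultiValuesSourceFieldConfig.Builder()
            .setFieldName("weight")
            .setMissing(1)
            .build();

        // valid: a script instead of a field
        MultiValuesSourceFieldConfig byScript = new MultiValuesSourceFieldConfig.Builder()
            .setScript(new Script("doc['weight'].value * 2"))
            .build();

        // invalid: neither field nor script -> IllegalArgumentException
        try {
            new MultiValuesSourceFieldConfig.Builder().build();
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
        System.out.println(byField.getFieldName() + " / " + byScript.getScript());
    }
}
```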
+ */ + +package org.elasticsearch.search.aggregations.support; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.AbstractObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; + +public final class MultiValuesSourceParseHelper { + + public static void declareCommon( + AbstractObjectParser, T> objectParser, boolean formattable, + ValueType targetValueType) { + + objectParser.declareField(MultiValuesSourceAggregationBuilder::valueType, p -> { + ValueType valueType = ValueType.resolveForScript(p.text()); + if (targetValueType != null && valueType.isNotA(targetValueType)) { + throw new ParsingException(p.getTokenLocation(), + "Aggregation [" + objectParser.getName() + "] was configured with an incompatible value type [" + + valueType + "]. It can only work on value of type [" + + targetValueType + "]"); + } + return valueType; + }, ValueType.VALUE_TYPE, ObjectParser.ValueType.STRING); + + if (formattable) { + objectParser.declareField(MultiValuesSourceAggregationBuilder::format, XContentParser::text, + ParseField.CommonFields.FORMAT, ObjectParser.ValueType.STRING); + } + } + + public static void declareField(String fieldName, + AbstractObjectParser, T> objectParser, + boolean scriptable, boolean timezoneAware) { + + objectParser.declareField((o, fieldConfig) -> o.field(fieldName, fieldConfig.build()), + (p, c) -> MultiValuesSourceFieldConfig.PARSER.apply(scriptable, timezoneAware).parse(p, null), + new ParseField(fieldName), ObjectParser.ValueType.OBJECT); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java index 318540e3e5806..7f6e76a6611a8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.support; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -95,6 +96,8 @@ public boolean isNumeric() { private final byte id; private String preferredName; + public static final ParseField VALUE_TYPE = new ParseField("value_type", "valueType"); + ValueType(byte id, String description, String preferredName, ValuesSourceType valuesSourceType, Class fieldDataType, DocValueFormat defaultFormat) { this.id = id; @@ -112,7 +115,7 @@ public String description() { public String getPreferredName() { return preferredName; } - + public ValuesSourceType getValuesSourceType() { return valuesSourceType; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java index b33ba8796604a..0e354e14a37ea 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java @@ -54,7 +54,6 @@ public static ValuesSourceConfig resolve( if (field == null) { if (script == null) { - @SuppressWarnings("unchecked") ValuesSourceConfig config = new ValuesSourceConfig<>(ValuesSourceType.ANY); config.format(resolveFormat(null, 
valueType)); return config; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java index 365233122c43e..fc0a2f3a9fefe 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java @@ -28,7 +28,6 @@ import org.joda.time.DateTimeZone; public final class ValuesSourceParserHelper { - static final ParseField TIME_ZONE = new ParseField("time_zone"); private ValuesSourceParserHelper() {} // utility class, no instantiation @@ -62,10 +61,10 @@ private static void declareFields( objectParser.declareField(ValuesSourceAggregationBuilder::field, XContentParser::text, - new ParseField("field"), ObjectParser.ValueType.STRING); + ParseField.CommonFields.FIELD, ObjectParser.ValueType.STRING); objectParser.declareField(ValuesSourceAggregationBuilder::missing, XContentParser::objectText, - new ParseField("missing"), ObjectParser.ValueType.VALUE); + ParseField.CommonFields.MISSING, ObjectParser.ValueType.VALUE); objectParser.declareField(ValuesSourceAggregationBuilder::valueType, p -> { ValueType valueType = ValueType.resolveForScript(p.text()); @@ -76,11 +75,11 @@ private static void declareFields( + targetValueType + "]"); } return valueType; - }, new ParseField("value_type", "valueType"), ObjectParser.ValueType.STRING); + }, ValueType.VALUE_TYPE, ObjectParser.ValueType.STRING); if (formattable) { objectParser.declareField(ValuesSourceAggregationBuilder::format, XContentParser::text, - new ParseField("format"), ObjectParser.ValueType.STRING); + ParseField.CommonFields.FORMAT, ObjectParser.ValueType.STRING); } if (scriptable) { @@ -96,7 +95,7 @@ private static void declareFields( } else { return DateTimeZone.forOffsetHours(p.intValue()); } - }, TIME_ZONE, ObjectParser.ValueType.LONG); + }, ParseField.CommonFields.TIME_ZONE, ObjectParser.ValueType.LONG); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceType.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceType.java index a6b252d6903d9..387e807ba861e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceType.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceType.java @@ -19,9 +19,37 @@ package org.elasticsearch.search.aggregations.support; -public enum ValuesSourceType { +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; +import java.util.Locale; + +public enum ValuesSourceType implements Writeable { ANY, NUMERIC, BYTES, GEOPOINT; + + public static final ParseField VALUE_SOURCE_TYPE = new ParseField("value_source_type"); + + public static ValuesSourceType fromString(String name) { + return valueOf(name.trim().toUpperCase(Locale.ROOT)); + } + + public static ValuesSourceType fromStream(StreamInput in) throws IOException { + return in.readEnum(ValuesSourceType.class); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + ValuesSourceType state = this; + out.writeEnum(state); + } + + public String value() { + return name().toLowerCase(Locale.ROOT); + } } diff --git 
a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 30f1dfb14fc0e..6adad6dabf0b5 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -90,7 +90,6 @@ public class GeoDistanceSortBuilder extends SortBuilder private DistanceUnit unit = DistanceUnit.DEFAULT; private SortMode sortMode = null; - @SuppressWarnings("rawtypes") private QueryBuilder nestedFilter; private String nestedPath; diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreInfo.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreInfo.java index 36e80501fc1b1..6a58b52f72a81 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreInfo.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreInfo.java @@ -18,18 +18,22 @@ */ package org.elasticsearch.snapshots; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.RestStatus; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; /** * Information about successfully completed restore operation. 
@@ -120,9 +124,6 @@ static final class Fields { static final String SUCCESSFUL = "successful"; } - /** - * {@inheritDoc} - */ @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -141,9 +142,23 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - /** - * {@inheritDoc} - */ + private static final ObjectParser PARSER = new ObjectParser<>(RestoreInfo.class.getName(), true, RestoreInfo::new); + + static { + ObjectParser shardsParser = new ObjectParser<>("shards", true, null); + shardsParser.declareInt((r, s) -> r.totalShards = s, new ParseField(Fields.TOTAL)); + shardsParser.declareInt((r, s) -> { /* only consume, don't set */ }, new ParseField(Fields.FAILED)); + shardsParser.declareInt((r, s) -> r.successfulShards = s, new ParseField(Fields.SUCCESSFUL)); + + PARSER.declareString((r, n) -> r.name = n, new ParseField(Fields.SNAPSHOT)); + PARSER.declareStringArray((r, i) -> r.indices = i, new ParseField(Fields.INDICES)); + PARSER.declareField(shardsParser::parse, new ParseField(Fields.SHARDS), ObjectParser.ValueType.OBJECT); + } + + public static RestoreInfo fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + @Override public void readFrom(StreamInput in) throws IOException { name = in.readString(); @@ -157,9 +172,6 @@ public void readFrom(StreamInput in) throws IOException { successfulShards = in.readVInt(); } - /** - * {@inheritDoc} - */ @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(name); @@ -193,4 +205,24 @@ public static RestoreInfo readOptionalRestoreInfo(StreamInput in) throws IOExcep return in.readOptionalStreamable(RestoreInfo::new); } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestoreInfo that = (RestoreInfo) o; + return totalShards == that.totalShards && + successfulShards == that.successfulShards && + Objects.equals(name, that.name) && + Objects.equals(indices, that.indices); + } + + @Override + public int hashCode() { + return Objects.hash(name, indices, totalShards, successfulShards); + } + + @Override + public String toString() { + return Strings.toString(this); + } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java b/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java index 314cd4053dd77..2847af386b2e1 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java +++ b/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java @@ -80,7 +80,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - @SuppressWarnings("unchecked") Snapshot that = (Snapshot) o; + Snapshot that = (Snapshot) o; return repository.equals(that.repository) && snapshotId.equals(that.snapshotId); } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java index b80dfd94d759b..7a8848618c25c 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java @@ -22,7 +22,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; import 
org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -98,7 +97,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - @SuppressWarnings("unchecked") final SnapshotId that = (SnapshotId) o; + final SnapshotId that = (SnapshotId) o; return name.equals(that.name) && uuid.equals(that.uuid); } diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 51a4adec8d16d..2d3be2435b401 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -21,7 +21,6 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.util.Counter; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; @@ -38,6 +37,7 @@ import org.elasticsearch.common.util.concurrent.XRejectedExecutionHandler; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.node.Node; import java.io.Closeable; @@ -197,7 +197,7 @@ public ThreadPool(final Settings settings, final ExecutorBuilder<?>... customBuilders threadContext = new ThreadContext(settings); final Map<String, ExecutorHolder> executors = new HashMap<>(); - for (@SuppressWarnings("unchecked") final Map.Entry<String, ExecutorBuilder> entry : builders.entrySet()) { + for (final Map.Entry<String, ExecutorBuilder> entry : builders.entrySet()) { final ExecutorBuilder.ExecutorSettings executorSettings = entry.getValue().getSettings(settings); final ExecutorHolder executorHolder = entry.getValue().build(executorSettings, threadContext); if (executors.containsKey(executorHolder.info.getName())) { @@ -338,6 +338,7 @@ public ExecutorService executor(String name) { * the ScheduledFuture cannot interact with it. * @throws org.elasticsearch.common.util.concurrent.EsRejectedExecutionException if the task cannot be scheduled for execution */ + @Override public ScheduledFuture<?> schedule(TimeValue delay, String executor, Runnable command) { if (!Names.SAME.equals(executor)) { command = new ThreadedRunnable(command, executor(executor)); @@ -358,6 +359,7 @@ public Cancellable scheduleWithFixedDelay(Runnable command, TimeValue interval, command, executor), e)); } + @Override public Runnable preserveContext(Runnable command) { return getThreadContext().preserveContext(command); } diff --git a/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java b/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java index 447bbd92dd2b0..fbe477ad04b1d 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java @@ -40,7 +40,6 @@ default T newInstance() { * * @return the deserialized response.
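Back in RestoreInfo (above), the parser was constructed with the lenient flag set (`new ObjectParser<>(..., true, RestoreInfo::new)`), so unknown keys are skipped rather than rejected, which keeps parsing forward-compatible. The same idea in a self-contained sketch with an illustrative POJO; the null second argument to `createParser` mirrors the tests later in this patch:

```java
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

public class LenientParserSketch {
    static class Info {              // illustrative stand-in for RestoreInfo
        String snapshot;
        int total;
    }

    private static final ObjectParser<Info, Void> PARSER =
        new ObjectParser<>("info", true, Info::new); // true => skip unknown fields
    static {
        PARSER.declareString((i, v) -> i.snapshot = v, new ParseField("snapshot"));
        PARSER.declareInt((i, v) -> i.total = v, new ParseField("total"));
    }

    public static void main(String[] args) throws Exception {
        String json = "{\"snapshot\":\"snap1\",\"total\":3,\"added_in_a_future_version\":true}";
        try (XContentParser p = XContentType.JSON.xContent()
                .createParser(NamedXContentRegistry.EMPTY, null, json)) {
            Info info = PARSER.parse(p, null);
            System.out.println(info.snapshot + " / " + info.total); // snap1 / 3
        }
    }
}
```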
*/ - @SuppressWarnings("deprecation") @Override default T read(StreamInput in) throws IOException { T instance = newInstance(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java b/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java index 2061349e3301d..c8503603f665c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java @@ -205,7 +205,14 @@ public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { assertThat(nodesMap.size(), equalTo(cluster().size())); for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { assertThat(nodeResponse.reloadException(), notNullValue()); - assertThat(nodeResponse.reloadException(), instanceOf(IOException.class)); + // Running in a JVM with a BouncyCastle FIPS Security Provider, decrypting the Keystore with the wrong + // password returns a SecurityException if the DataInputStream can't be fully consumed + if (inFipsJvm()) { + assertThat(nodeResponse.reloadException(), instanceOf(SecurityException.class)); + } else { + assertThat(nodeResponse.reloadException(), instanceOf(IOException.class)); + } + } } catch (final AssertionError e) { reloadSettingsError.set(e); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java new file mode 100644 index 0000000000000..fbe8761a07d12 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java @@ -0,0 +1,141 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.cluster.snapshots.restore; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class RestoreSnapshotRequestTests extends AbstractWireSerializingTestCase { + private RestoreSnapshotRequest randomState(RestoreSnapshotRequest instance) { + if (randomBoolean()) { + List indices = new ArrayList<>(); + int count = randomInt(3) + 1; + + for (int i = 0; i < count; ++i) { + indices.add(randomAlphaOfLength(randomInt(3) + 2)); + } + + instance.indices(indices); + } + if (randomBoolean()) { + instance.renamePattern(randomUnicodeOfLengthBetween(1, 100)); + } + if (randomBoolean()) { + instance.renameReplacement(randomUnicodeOfLengthBetween(1, 100)); + } + instance.partial(randomBoolean()); + instance.includeAliases(randomBoolean()); + + if (randomBoolean()) { + Map settings = new HashMap<>(); + int count = randomInt(3) + 1; + + for (int i = 0; i < count; ++i) { + settings.put(randomAlphaOfLengthBetween(2, 5), randomAlphaOfLengthBetween(2, 5)); + } + + instance.settings(settings); + } + if (randomBoolean()) { + Map indexSettings = new HashMap<>(); + int count = randomInt(3) + 1; + + for (int i = 0; i < count; ++i) { + indexSettings.put(randomAlphaOfLengthBetween(2, 5), randomAlphaOfLengthBetween(2, 5));; + } + instance.indexSettings(indexSettings); + } + + instance.includeGlobalState(randomBoolean()); + + if (randomBoolean()) { + Collection wildcardStates = randomSubsetOf( + Arrays.asList(IndicesOptions.WildcardStates.values())); + Collection options = randomSubsetOf( + Arrays.asList(IndicesOptions.Option.ALLOW_NO_INDICES, IndicesOptions.Option.IGNORE_UNAVAILABLE)); + + instance.indicesOptions(new IndicesOptions( + options.isEmpty() ? IndicesOptions.Option.NONE : EnumSet.copyOf(options), + wildcardStates.isEmpty() ? 
IndicesOptions.WildcardStates.NONE : EnumSet.copyOf(wildcardStates))); + } + + instance.waitForCompletion(randomBoolean()); + + if (randomBoolean()) { + instance.masterNodeTimeout(randomTimeValue()); + } + return instance; + } + + @Override + protected RestoreSnapshotRequest createTestInstance() { + return randomState(new RestoreSnapshotRequest(randomAlphaOfLength(5), randomAlphaOfLength(10))); + } + + @Override + protected Writeable.Reader instanceReader() { + return RestoreSnapshotRequest::new; + } + + @Override + protected RestoreSnapshotRequest mutateInstance(RestoreSnapshotRequest instance) throws IOException { + RestoreSnapshotRequest copy = copyInstance(instance); + // ensure that at least one property is different + copy.repository("copied-" + instance.repository()); + return randomState(copy); + } + + public void testSource() throws IOException { + RestoreSnapshotRequest original = createTestInstance(); + XContentBuilder builder = original.toXContent(XContentFactory.jsonBuilder(), new ToXContent.MapParams(Collections.emptyMap())); + XContentParser parser = XContentType.JSON.xContent().createParser( + NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput()); + Map map = parser.mapOrdered(); + + // we will only restore properties from the map that are contained in the request body. All other + // properties are restored from the original (in the actual REST action this is restored from the + // REST path and request parameters). + RestoreSnapshotRequest processed = new RestoreSnapshotRequest(original.repository(), original.snapshot()); + processed.masterNodeTimeout(original.masterNodeTimeout()); + processed.waitForCompletion(original.waitForCompletion()); + + processed.source(map); + + assertEquals(original, processed); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponseTests.java new file mode 100644 index 0000000000000..17d1ecafabdae --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponseTests.java @@ -0,0 +1,56 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
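These tests lean on `AbstractWireSerializingTestCase`, whose contract is: `createTestInstance()` produces a random value, `instanceReader()` deserializes it, `mutateInstance()` proves `equals()` can tell instances apart, and the harness round-trips everything through the wire format. The round trip itself, done by hand with a toy `Writeable`:

```java
import java.io.IOException;
import java.util.Objects;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

public class WireRoundTripSketch {
    static final class Point implements Writeable {   // toy Writeable
        final int x;
        final int y;

        Point(int x, int y) { this.x = x; this.y = y; }

        Point(StreamInput in) throws IOException {    // the Writeable.Reader side
            this(in.readVInt(), in.readVInt());
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVInt(x);
            out.writeVInt(y);
        }

        @Override
        public boolean equals(Object o) {
            return o instanceof Point && ((Point) o).x == x && ((Point) o).y == y;
        }

        @Override
        public int hashCode() { return Objects.hash(x, y); }
    }

    public static void main(String[] args) throws IOException {
        Point original = new Point(3, 7);
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            original.writeTo(out);
            Point copy = new Point(out.bytes().streamInput());
            System.out.println("round trip ok: " + original.equals(copy));
        }
    }
}
```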
+ */ + +package org.elasticsearch.action.admin.cluster.snapshots.restore; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.snapshots.RestoreInfo; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class RestoreSnapshotResponseTests extends AbstractXContentTestCase { + + @Override + protected RestoreSnapshotResponse createTestInstance() { + if (randomBoolean()) { + String name = randomRealisticUnicodeOfCodepointLengthBetween(1, 30); + List indices = new ArrayList<>(); + indices.add("test0"); + indices.add("test1"); + int totalShards = randomIntBetween(1, 1000); + int successfulShards = randomIntBetween(0, totalShards); + return new RestoreSnapshotResponse(new RestoreInfo(name, indices, totalShards, successfulShards)); + } else { + return new RestoreSnapshotResponse(null); + } + } + + @Override + protected RestoreSnapshotResponse doParseInstance(XContentParser parser) throws IOException { + return RestoreSnapshotResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java index 264a92137be96..f53eb63bc100d 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java @@ -153,7 +153,6 @@ public void testRefreshListeners() throws Exception { assertEquals(0, common.refresh.getListeners()); } - @SuppressWarnings("unchecked") public void testUuidOnRootStatsIndices() { String uuid = createIndex("test").indexUUID(); IndicesStatsResponse rsp = client().admin().indices().prepareStats().get(); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 0b9055cb75850..9d82e9e1cdca5 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -672,7 +672,6 @@ private static void assertIndexMappingsNotFiltered(ImmutableOpenMap properties, String... 
fields) { for (String field : fields) { assertTrue(properties.containsKey(field)); - @SuppressWarnings("unchecked") Map fieldProp = (Map)properties.get(field); assertNotNull(fieldProp); assertFalse(fieldProp.containsKey("properties")); diff --git a/server/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java b/server/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java index d64dece7867aa..6431a3469b6b0 100644 --- a/server/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java +++ b/server/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java @@ -31,6 +31,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; @@ -42,6 +43,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; +import static org.hamcrest.Matchers.iterableWithSize; public class StreamTests extends ESTestCase { @@ -65,7 +67,7 @@ public void testBooleanSerialization() throws IOException { final Set set = IntStream.range(Byte.MIN_VALUE, Byte.MAX_VALUE).mapToObj(v -> (byte) v).collect(Collectors.toSet()); set.remove((byte) 0); set.remove((byte) 1); - final byte[] corruptBytes = new byte[] { randomFrom(set) }; + final byte[] corruptBytes = new byte[]{randomFrom(set)}; final BytesReference corrupt = new BytesArray(corruptBytes); final IllegalStateException e = expectThrows(IllegalStateException.class, () -> corrupt.streamInput().readBoolean()); final String message = String.format(Locale.ROOT, "unexpected byte [0x%02x]", corruptBytes[0]); @@ -100,7 +102,7 @@ public void testOptionalBooleanSerialization() throws IOException { set.remove((byte) 0); set.remove((byte) 1); set.remove((byte) 2); - final byte[] corruptBytes = new byte[] { randomFrom(set) }; + final byte[] corruptBytes = new byte[]{randomFrom(set)}; final BytesReference corrupt = new BytesArray(corruptBytes); final IllegalStateException e = expectThrows(IllegalStateException.class, () -> corrupt.streamInput().readOptionalBoolean()); final String message = String.format(Locale.ROOT, "unexpected byte [0x%02x]", corruptBytes[0]); @@ -119,22 +121,22 @@ public void testRandomVLongSerialization() throws IOException { public void testSpecificVLongSerialization() throws IOException { List> values = - Arrays.asList( - new Tuple<>(0L, new byte[]{0}), - new Tuple<>(-1L, new byte[]{1}), - new Tuple<>(1L, new byte[]{2}), - new Tuple<>(-2L, new byte[]{3}), - new Tuple<>(2L, new byte[]{4}), - new Tuple<>(Long.MIN_VALUE, new byte[]{-1, -1, -1, -1, -1, -1, -1, -1, -1, 1}), - new Tuple<>(Long.MAX_VALUE, new byte[]{-2, -1, -1, -1, -1, -1, -1, -1, -1, 1}) - - ); + Arrays.asList( + new Tuple<>(0L, new byte[]{0}), + new Tuple<>(-1L, new byte[]{1}), + new Tuple<>(1L, new byte[]{2}), + new Tuple<>(-2L, new byte[]{3}), + new Tuple<>(2L, new byte[]{4}), + new Tuple<>(Long.MIN_VALUE, new byte[]{-1, -1, -1, -1, -1, -1, -1, -1, -1, 1}), + new Tuple<>(Long.MAX_VALUE, new byte[]{-2, -1, -1, -1, -1, -1, -1, -1, -1, 1}) + + ); for (Tuple value : values) { BytesStreamOutput out = new BytesStreamOutput(); out.writeZLong(value.v1()); assertArrayEquals(Long.toString(value.v1()), value.v2(), BytesReference.toBytes(out.bytes())); BytesReference bytes = new BytesArray(value.v2()); - assertEquals(Arrays.toString(value.v2()), (long)value.v1(), bytes.streamInput().readZLong()); + assertEquals(Arrays.toString(value.v2()), (long) 
value.v1(), bytes.streamInput().readZLong()); } } @@ -158,7 +160,7 @@ public void testLinkedHashMap() throws IOException { } BytesStreamOutput out = new BytesStreamOutput(); out.writeGenericValue(write); - LinkedHashMap read = (LinkedHashMap)out.bytes().streamInput().readGenericValue(); + LinkedHashMap read = (LinkedHashMap) out.bytes().streamInput().readGenericValue(); assertEquals(size, read.size()); int index = 0; for (Map.Entry entry : read.entrySet()) { @@ -172,7 +174,8 @@ public void testFilterStreamInputDelegatesAvailable() throws IOException { final int length = randomIntBetween(1, 1024); StreamInput delegate = StreamInput.wrap(new byte[length]); - FilterStreamInput filterInputStream = new FilterStreamInput(delegate) {}; + FilterStreamInput filterInputStream = new FilterStreamInput(delegate) { + }; assertEquals(filterInputStream.available(), length); // read some bytes @@ -201,7 +204,7 @@ public void testReadArraySize() throws IOException { } stream.writeByteArray(array); InputStreamStreamInput streamInput = new InputStreamStreamInput(StreamInput.wrap(BytesReference.toBytes(stream.bytes())), array - .length-1); + .length - 1); expectThrows(EOFException.class, streamInput::readByteArray); streamInput = new InputStreamStreamInput(StreamInput.wrap(BytesReference.toBytes(stream.bytes())), BytesReference.toBytes(stream .bytes()).length); @@ -230,6 +233,21 @@ public void testWritableArrays() throws IOException { assertThat(targetArray, equalTo(sourceArray)); } + public void testSetOfLongs() throws IOException { + final int size = randomIntBetween(0, 6); + final Set sourceSet = new HashSet<>(size); + for (int i = 0; i < size; i++) { + sourceSet.add(randomLongBetween(i * 1000, (i + 1) * 1000 - 1)); + } + assertThat(sourceSet, iterableWithSize(size)); + + final BytesStreamOutput out = new BytesStreamOutput(); + out.writeCollection(sourceSet, StreamOutput::writeLong); + + final Set targetSet = out.bytes().streamInput().readSet(StreamInput::readLong); + assertThat(targetSet, equalTo(sourceSet)); + } + static final class WriteableString implements Writeable { final String string; diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 7d72ad9a4e16e..cdaccb486f289 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -61,8 +61,8 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase { @Override protected MatchQueryBuilder doCreateTestQueryBuilder() { - String fieldName = STRING_ALIAS_FIELD_NAME; //randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, - //INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME); + String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, + INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME); if (fieldName.equals(DATE_FIELD_NAME)) { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 703ba237daa75..97ec88168f341 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -28,6 +28,8 @@ import org.apache.lucene.search.TermQuery; import 
org.apache.lucene.search.TopDocs; import org.apache.lucene.store.AlreadyClosedException; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Constants; @@ -120,6 +122,7 @@ import org.elasticsearch.test.FieldMaskingReader; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.ElasticsearchException; import java.io.IOException; import java.nio.charset.Charset; @@ -146,6 +149,7 @@ import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.LongFunction; +import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -1172,6 +1176,81 @@ public void testShardStats() throws IOException { closeShards(shard); } + + public void testShardStatsWithFailures() throws IOException { + allowShardFailures(); + final ShardId shardId = new ShardId("index", "_na_", 0); + final ShardRouting shardRouting = newShardRouting(shardId, "node", true, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE, ShardRoutingState.INITIALIZING); + final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir()); + + + ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId); + Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .build(); + IndexMetaData metaData = IndexMetaData.builder(shardRouting.getIndexName()) + .settings(settings) + .primaryTerm(0, 1) + .build(); + + // Override two Directory methods so that we can make them fail on demand + // We use an AtomicReference so the failure can be injected in the middle of the test, not immediately + // We use a Supplier<IOException> instead of a pre-built IOException to produce a meaningful stack trace + // (remember that the stack trace is filled in when the exception is instantiated) + AtomicReference<Supplier<IOException>> exceptionToThrow = new AtomicReference<>(); + AtomicBoolean throwWhenMarkingStoreCorrupted = new AtomicBoolean(false); + Directory directory = new FilterDirectory(newFSDirectory(shardPath.resolveIndex())) { + // the fileLength method is called during the storeStats try block; + // it is not called when the store is marked as corrupted + @Override + public long fileLength(String name) throws IOException { + Supplier<IOException> ex = exceptionToThrow.get(); + if (ex == null) { + return super.fileLength(name); + } else { + throw ex.get(); + } + } + + // the listAll method is called when marking the store as corrupted + @Override + public String[] listAll() throws IOException { + Supplier<IOException> ex = exceptionToThrow.get(); + if (throwWhenMarkingStoreCorrupted.get() && ex != null) { + throw ex.get(); + } else { + return super.listAll(); + } + } + }; + + try (Store store = createStore(shardId, new IndexSettings(metaData, Settings.EMPTY), directory)) { + IndexShard shard = newShard(shardRouting, shardPath, metaData, store, + null, new InternalEngineFactory(), () -> { + }, EMPTY_EVENT_LISTENER); + AtomicBoolean failureCallbackTriggered = new AtomicBoolean(false); + shard.addShardFailureCallback((ig)->failureCallbackTriggered.set(true)); + + recoverShardFromStore(shard); + + final boolean corruptIndexException = randomBoolean(); + + if (corruptIndexException) { + exceptionToThrow.set(() -> new CorruptIndexException("Test CorruptIndexException", "Test resource")); +
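The pattern above is worth pausing on. What follows is a minimal editorial sketch, not code from this patch, and all names in it are invented: it isolates why the test stores a `Supplier<IOException>` rather than a ready-made exception. A `Throwable` captures its stack trace when it is constructed, so building the exception lazily inside the overridden `Directory` method makes the trace point at the injected failure site rather than at test setup.

```java
import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;

public class LazyFailureDemo {
    // Holds the failure to inject; null means "behave normally".
    static final AtomicReference<Supplier<IOException>> FAILURE = new AtomicReference<>();

    // Stands in for an overridden Directory method such as fileLength().
    static long fileLength() throws IOException {
        Supplier<IOException> failure = FAILURE.get();
        if (failure != null) {
            throw failure.get(); // the exception is built here, so its trace records the throw site
        }
        return 42L;
    }

    public static void main(String[] args) {
        FAILURE.set(() -> new IOException("injected failure"));
        try {
            fileLength();
        } catch (IOException e) {
            // The trace runs through fileLength() because the Supplier only
            // constructed the exception once the instrumented call actually failed.
            e.printStackTrace();
        }
    }
}
```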
throwWhenMarkingStoreCorrupted.set(randomBoolean()); + } else { + exceptionToThrow.set(() -> new IOException("Test IOException")); + } + ElasticsearchException e = expectThrows(ElasticsearchException.class, shard::storeStats); + assertTrue(failureCallbackTriggered.get()); + + if (corruptIndexException && !throwWhenMarkingStoreCorrupted.get()) { + assertTrue(store.isMarkedCorrupted()); + } + } + } + public void testRefreshMetric() throws IOException { IndexShard shard = newStartedShard(); assertThat(shard.refreshStats().getTotal(), equalTo(2L)); // refresh on: finalize and end of recovery @@ -1878,6 +1957,7 @@ public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE), shard.shardPath(), shard.indexSettings().getIndexMetaData(), + null, wrapper, new InternalEngineFactory(), () -> {}, @@ -2030,6 +2110,7 @@ public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE), shard.shardPath(), shard.indexSettings().getIndexMetaData(), + null, wrapper, new InternalEngineFactory(), () -> {}, @@ -2533,7 +2614,7 @@ public void testReadSnapshotAndCheckIndexConcurrently() throws Exception { .put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), randomFrom("false", "true", "checksum", "fix"))) .build(); final IndexShard newShard = newShard(shardRouting, indexShard.shardPath(), indexMetaData, - null, indexShard.engineFactory, indexShard.getGlobalCheckpointSyncer(), EMPTY_EVENT_LISTENER); + null, null, indexShard.engineFactory, indexShard.getGlobalCheckpointSyncer(), EMPTY_EVENT_LISTENER); Store.MetadataSnapshot storeFileMetaDatas = newShard.snapshotStoreMetadata(); assertTrue("at least 2 files, commit and data: " + storeFileMetaDatas.toString(), storeFileMetaDatas.size() > 1); @@ -3040,7 +3121,7 @@ public void testFlushOnInactive() throws Exception { ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId); AtomicBoolean markedInactive = new AtomicBoolean(); AtomicReference primaryRef = new AtomicReference<>(); - IndexShard primary = newShard(shardRouting, shardPath, metaData, null, new InternalEngineFactory(), () -> { + IndexShard primary = newShard(shardRouting, shardPath, metaData, null, null, new InternalEngineFactory(), () -> { }, new IndexEventListener() { @Override public void onShardInactive(IndexShard indexShard) { diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java index 3a5302bcec2e6..5611421594aa1 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java @@ -97,6 +97,7 @@ public void tearDown() throws Exception { terminate(threadPool); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32308") public void testRandomClusterStateUpdates() { // we have an IndicesClusterStateService per node in the cluster final Map clusterStateServiceMap = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java 
b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java index 0eae9a1420068..fa7de2d629112 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java @@ -105,6 +105,7 @@ public void testRestoreSnapshotWithExistingFiles() throws IOException { shard.shardPath(), shard.indexSettings().getIndexMetaData(), null, + null, new InternalEngineFactory(), () -> {}, EMPTY_EVENT_LISTENER); diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java index 1abdb97f174b6..0137f136d3ecf 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -123,7 +123,7 @@ public void testRetrieveSnapshots() throws Exception { logger.info("--> make sure the node's repository can resolve the snapshots"); final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class); - @SuppressWarnings("unchecked") final BlobStoreRepository repository = + final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(repositoryName); final List originalSnapshots = Arrays.asList(snapshotId1, snapshotId2); @@ -245,7 +245,7 @@ private BlobStoreRepository setupRepo() { assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class); - @SuppressWarnings("unchecked") final BlobStoreRepository repository = + final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(repositoryName); assertThat("getBlobContainer has to be lazy initialized", repository.getBlobContainer(), nullValue()); return repository; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java index d216709791e94..2876fbbaa252d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java @@ -32,9 +32,9 @@ import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.filter.Filter; +import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; -import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.stats.Stats; @@ -84,7 +84,6 @@ protected Collection> nodePlugins() { public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = super.pluginScripts(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java index bce4006fa1078..e7e6402727449 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java @@ -81,7 +81,6 @@ protected Collection> nodePlugins() { public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = super.pluginScripts(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java index 4a85c2c145329..eeb6e12161383 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java @@ -22,6 +22,7 @@ import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.LongSet; import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -77,7 +78,6 @@ protected Collection> nodePlugins() { public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index edc29b0d2c574..99aeac167e06e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -75,7 +75,6 @@ protected Collection> nodePlugins() { public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = super.pluginScripts(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index 0ed1dacb73f5e..b0263cb2dbd80 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -1746,7 +1746,6 @@ private void addToDocument(Document doc, Map> keys) { } } - @SuppressWarnings("unchecked") private static Map createAfterKey(Object... 
fields) { assert fields.length % 2 == 0; final Map map = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index c893e59596484..e1206cb8d1552 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -655,6 +655,7 @@ public void testPreGetChildLeafCollectors() throws IOException { public void testFieldAlias() throws IOException { int numRootDocs = randomIntBetween(1, 20); + int expectedNestedDocs = 0; MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType( NumberFieldMapper.NumberType.LONG); @@ -665,6 +666,7 @@ public void testFieldAlias() throws IOException { for (int i = 0; i < numRootDocs; i++) { List documents = new ArrayList<>(); int numNestedDocs = randomIntBetween(0, 20); + expectedNestedDocs += numNestedDocs; generateDocuments(documents, numNestedDocs, i, NESTED_OBJECT, VALUE_FIELD_NAME); Document document = new Document(); @@ -681,7 +683,6 @@ public void testFieldAlias() throws IOException { try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) { NestedAggregationBuilder agg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( max(MAX_AGG_NAME).field(VALUE_FIELD_NAME)); - NestedAggregationBuilder aliasAgg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias")); @@ -690,8 +691,8 @@ public void testFieldAlias() throws IOException { Nested aliasNested = search(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), aliasAgg, fieldType); - assertTrue(nested.getDocCount() > 0); assertEquals(nested, aliasNested); + assertEquals(expectedNestedDocs, nested.getDocCount()); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java index 99322af2264ac..fd831e5076caa 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -169,6 +169,7 @@ public void testMaxFromParentDocs() throws IOException { public void testFieldAlias() throws IOException { int numParentDocs = randomIntBetween(1, 20); + int expectedParentDocs = 0; MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType( NumberFieldMapper.NumberType.LONG); @@ -179,6 +180,10 @@ public void testFieldAlias() throws IOException { for (int i = 0; i < numParentDocs; i++) { List documents = new ArrayList<>(); int numNestedDocs = randomIntBetween(0, 20); + if (numNestedDocs > 0) { + expectedParentDocs++; + } + for (int nested = 0; nested < numNestedDocs; nested++) { Document document = new Document(); document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), @@ -203,7 +208,6 @@ public void testFieldAlias() throws IOException { } try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) { - MaxAggregationBuilder maxAgg = max(MAX_AGG_NAME).field(VALUE_FIELD_NAME); MaxAggregationBuilder aliasMaxAgg = max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias"); @@ -220,8 +224,8 @@ public void testFieldAlias() throws IOException { ReverseNested 
reverseNested = nested.getAggregations().get(REVERSE_AGG_NAME); ReverseNested aliasReverseNested = aliasNested.getAggregations().get(REVERSE_AGG_NAME); - assertTrue(reverseNested.getDocCount() > 0); assertEquals(reverseNested, aliasReverseNested); + assertEquals(expectedParentDocs, reverseNested.getDocCount()); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java index 3b7e686ef4d85..160e51a67b2c8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -36,9 +36,6 @@ import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; import org.elasticsearch.search.aggregations.bucket.filter.Filter; -import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; -import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.metrics.stats.Stats; @@ -103,7 +100,6 @@ public void resetOptimizations() { public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = super.pluginScripts(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregatorTests.java new file mode 100644 index 0000000000000..70b1b651723e0 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregatorTests.java @@ -0,0 +1,428 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.metrics.weighted_avg; + +import org.apache.lucene.document.IntPoint; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.DocValuesFieldExistsQuery; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig; +import org.joda.time.DateTimeZone; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.function.Consumer; + +import static java.util.Collections.singleton; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class WeightedAvgAggregatorTests extends AggregatorTestCase { + + public void testNoDocs() throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { + // Intentionally not writing any docs + }, avg -> { + assertEquals(Double.NaN, avg.getValue(), 0); + }); + } + + public void testNoMatchingField() throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { + iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 7))); + iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 3))); + }, avg -> { + assertEquals(Double.NaN, avg.getValue(), 0); + }); + } + + public void testSomeMatchesSortedNumericDocValuesNoWeight() throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 7), + new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new 
SortedNumericDocValuesField("value_field", 2), + new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("weight_field", 1))); + }, avg -> { + assertEquals(4, avg.getValue(), 0); + }); + } + + public void testSomeMatchesSortedNumericDocValuesWeights() throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 7), + new SortedNumericDocValuesField("weight_field", 2))); + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2), + new SortedNumericDocValuesField("weight_field", 3))); + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("weight_field", 3))); + + }, avg -> { + // (7*2 + 2*3 + 3*3) / (2+3+3) == 3.625 + assertEquals(3.625, avg.getValue(), 0); + }); + } + + public void testSomeMatchesNumericDocValues() throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + testCase(new DocValuesFieldExistsQuery("value_field"), aggregationBuilder, iw -> { + iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", 7), + new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", 2), + new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("weight_field", 1))); + }, avg -> { + assertEquals(4, avg.getValue(), 0); + }); + } + + public void testQueryFiltering() throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + testCase(IntPoint.newRangeQuery("value_field", 0, 3), aggregationBuilder, iw -> { + iw.addDocument(Arrays.asList(new IntPoint("value_field", 7), new SortedNumericDocValuesField("value_field", 7), + new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new IntPoint("value_field", 1), new SortedNumericDocValuesField("value_field", 2), + new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new IntPoint("value_field", 3), new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("weight_field", 1))); + }, avg -> { + assertEquals(2.5, avg.getValue(), 0); + }); + } + + public void testQueryFilteringWeights() throws IOException { 
+ MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + testCase(IntPoint.newRangeQuery("filter_field", 0, 3), aggregationBuilder, iw -> { + iw.addDocument(Arrays.asList(new IntPoint("filter_field", 7), new SortedNumericDocValuesField("value_field", 7), + new SortedNumericDocValuesField("weight_field", 2))); + iw.addDocument(Arrays.asList(new IntPoint("filter_field", 2), new SortedNumericDocValuesField("value_field", 2), + new SortedNumericDocValuesField("weight_field", 3))); + iw.addDocument(Arrays.asList(new IntPoint("filter_field", 3), new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("weight_field", 4))); + }, avg -> { + double value = (2.0*3.0 + 3.0*4.0) / (3.0+4.0); + assertEquals(value, avg.getValue(), 0); + }); + } + + public void testQueryFiltersAll() throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + testCase(IntPoint.newRangeQuery("value_field", -1, 0), aggregationBuilder, iw -> { + iw.addDocument(Arrays.asList(new IntPoint("value_field", 7), new SortedNumericDocValuesField("value_field", 7))); + iw.addDocument(Arrays.asList(new IntPoint("value_field", 1), new SortedNumericDocValuesField("value_field", 2))); + iw.addDocument(Arrays.asList(new IntPoint("value_field", 3), new SortedNumericDocValuesField("value_field", 7))); + }, avg -> { + assertEquals(Double.NaN, avg.getValue(), 0); + }); + } + + public void testQueryFiltersAllWeights() throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + testCase(IntPoint.newRangeQuery("value_field", -1, 0), aggregationBuilder, iw -> { + iw.addDocument(Arrays.asList(new IntPoint("filter_field", 7), new SortedNumericDocValuesField("value_field", 7), + new SortedNumericDocValuesField("weight_field", 2))); + iw.addDocument(Arrays.asList(new IntPoint("filter_field", 2), new SortedNumericDocValuesField("value_field", 2), + new SortedNumericDocValuesField("weight_field", 3))); + iw.addDocument(Arrays.asList(new IntPoint("filter_field", 3), new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("weight_field", 4))); + }, avg -> { + assertEquals(Double.NaN, avg.getValue(), 0); + }); + } + + public void testValueSetMissing() throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder() + .setFieldName("value_field") + .setMissing(2) + .build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); + 
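Stepping back from the individual tests for a moment: the arithmetic all of these assertions encode is the plain weighted mean, avg = sum(value_i * weight_i) / sum(weight_i), with the `missing` setting substituting a default for documents that lack the configured field. Below is a small self-contained sketch (invented names, not the aggregator's implementation) that reproduces the expected values asserted in these tests.

```java
public final class WeightedMeanSketch {

    // avg = sum(value_i * weight_i) / sum(weight_i)
    static double weightedAvg(double[] values, double[] weights) {
        double weightedSum = 0;
        double weightTotal = 0;
        for (int i = 0; i < values.length; i++) {
            weightedSum += values[i] * weights[i];
            weightTotal += weights[i];
        }
        return weightedSum / weightTotal; // 0/0 yields NaN, matching the no-matching-docs cases
    }

    public static void main(String[] args) {
        // testQueryFilteringWeights: the value-7 document is filtered out
        System.out.println(weightedAvg(new double[]{2, 3}, new double[]{3, 4}));       // (2*3 + 3*4) / 7 = 2.571...
        // testValueSetMissing: all three documents fall back to missing = 2
        System.out.println(weightedAvg(new double[]{2, 2, 2}, new double[]{2, 3, 4})); // 18 / 9 = 2.0
        // testQueryFiltersAll: nothing matched
        System.out.println(weightedAvg(new double[0], new double[0]));                 // NaN
    }
}
```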
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { + iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("weight_field", 2))); + iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("weight_field", 3))); + iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("weight_field", 4))); + }, avg -> { + double value = (2.0*2.0 + 2.0*3.0 + 2.0*4.0) / (2.0+3.0+4.0); + assertEquals(value, avg.getValue(), 0); + }); + } + + public void testWeightSetMissing() throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder() + .setFieldName("weight_field") + .setMissing(2) + .build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { + iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("value_field", 2))); + iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("value_field", 3))); + iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("value_field", 4))); + }, avg -> { + double value = (2.0*2.0 + 3.0*2.0 + 4.0*2.0) / (2.0+2.0+2.0); + assertEquals(value, avg.getValue(), 0); + }); + } + + public void testWeightSetTimezone() throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder() + .setFieldName("weight_field") + .setTimeZone(DateTimeZone.UTC) + .build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2), + new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 4), + new SortedNumericDocValuesField("weight_field", 1))); + }, avg -> { + fail("Should not have executed test case"); + })); + assertThat(e.getMessage(), equalTo("Field [weight_field] of type [long] does not support custom time zones")); + } + + public void testValueSetTimezone() throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder() + .setFieldName("value_field") + .setTimeZone(DateTimeZone.UTC) + .build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2), + 
new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 4), + new SortedNumericDocValuesField("weight_field", 1))); + }, avg -> { + fail("Should not have executed test case"); + })); + assertThat(e.getMessage(), equalTo("Field [value_field] of type [long] does not support custom time zones")); + } + + public void testMultiValues() throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder() + .setFieldName("value_field") + .build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + + testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2), + new SortedNumericDocValuesField("value_field", 3), new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("value_field", 4), new SortedNumericDocValuesField("weight_field", 1))); + iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 4), + new SortedNumericDocValuesField("value_field", 5), new SortedNumericDocValuesField("weight_field", 1))); + }, avg -> { + double value = (((2.0+3.0)/2.0) + ((3.0+4.0)/2.0) + ((4.0+5.0)/2.0)) / (1.0+1.0+1.0); + assertEquals(value, avg.getValue(), 0); + }); + } + + public void testMultiWeight() throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder() + .setFieldName("weight_field") + .build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + + AggregationExecutionException e = expectThrows(AggregationExecutionException.class, + () -> testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> { + iw.addDocument(Arrays.asList( + new SortedNumericDocValuesField("value_field", 2), + new SortedNumericDocValuesField("weight_field", 2), new SortedNumericDocValuesField("weight_field", 3))); + iw.addDocument(Arrays.asList( + new SortedNumericDocValuesField("value_field", 3), + new SortedNumericDocValuesField("weight_field", 3), new SortedNumericDocValuesField("weight_field", 4))); + iw.addDocument(Arrays.asList( + new SortedNumericDocValuesField("value_field", 4), + new SortedNumericDocValuesField("weight_field", 4), new SortedNumericDocValuesField("weight_field", 5))); + }, avg -> { + fail("Should have thrown exception"); + })); + assertThat(e.getMessage(), containsString("Encountered more than one weight for a single document. 
" + + "Use a script to combine multiple weights-per-doc into a single value.")); + } + + + public void testSummationAccuracy() throws IOException { + // Summing up a normal array and expect an accurate value + double[] values = new double[]{0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7}; + verifyAvgOfDoubles(values, 0.9, 0d); + + // Summing up an array which contains NaN and infinities and expect a result same as naive summation + int n = randomIntBetween(5, 10); + values = new double[n]; + double sum = 0; + for (int i = 0; i < n; i++) { + values[i] = frequently() + ? randomFrom(Double.NaN, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY) + : randomDoubleBetween(Double.MIN_VALUE, Double.MAX_VALUE, true); + sum += values[i]; + } + verifyAvgOfDoubles(values, sum / n, 1e-10); + + // Summing up some big double values and expect infinity result + n = randomIntBetween(5, 10); + double[] largeValues = new double[n]; + for (int i = 0; i < n; i++) { + largeValues[i] = Double.MAX_VALUE; + } + verifyAvgOfDoubles(largeValues, Double.POSITIVE_INFINITY, 0d); + + for (int i = 0; i < n; i++) { + largeValues[i] = -Double.MAX_VALUE; + } + verifyAvgOfDoubles(largeValues, Double.NEGATIVE_INFINITY, 0d); + } + + private void verifyAvgOfDoubles(double[] values, double expected, double delta) throws IOException { + MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); + MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); + WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") + .value(valueConfig) + .weight(weightConfig); + testCase(new MatchAllDocsQuery(), aggregationBuilder, + iw -> { + for (double value : values) { + iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", NumericUtils.doubleToSortableLong(value)), + new SortedNumericDocValuesField("weight_field", NumericUtils.doubleToSortableLong(1.0)))); + } + }, + avg -> assertEquals(expected, avg.getValue(), delta), + NumberFieldMapper.NumberType.DOUBLE + ); + } + + private void testCase(Query query, WeightedAvgAggregationBuilder aggregationBuilder, + CheckedConsumer buildIndex, + Consumer verify) throws IOException { + testCase(query, aggregationBuilder, buildIndex, verify, NumberFieldMapper.NumberType.LONG); + } + + private void testCase(Query query, WeightedAvgAggregationBuilder aggregationBuilder, + CheckedConsumer buildIndex, + Consumer verify, + NumberFieldMapper.NumberType fieldNumberType) throws IOException { + + Directory directory = newDirectory(); + RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); + buildIndex.accept(indexWriter); + indexWriter.close(); + IndexReader indexReader = DirectoryReader.open(directory); + IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + + try { + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(fieldNumberType); + fieldType.setName("value_field"); + fieldType.setHasDocValues(true); + + MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType(fieldNumberType); + fieldType2.setName("weight_field"); + fieldType2.setHasDocValues(true); + + WeightedAvgAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType, fieldType2); + aggregator.preCollection(); + indexSearcher.search(query, aggregator); + aggregator.postCollection(); + verify.accept((InternalWeightedAvg) aggregator.buildAggregation(0L)); + } 
finally { + indexReader.close(); + directory.close(); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java index fa46921a9416f..f5dc01f19148b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java @@ -262,7 +262,6 @@ public void testNoBuckets() throws IOException { }); } - @SuppressWarnings("unchecked") private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consumer verify) throws IOException { executeTestCase(query, aggBuilder, verify, indexWriter -> { Document document = new Document(); @@ -282,7 +281,6 @@ private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consume }); } - @SuppressWarnings("unchecked") private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consumer verify, CheckedConsumer setup) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java index 88bbe3671b232..db3f2d745e1f6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java @@ -98,7 +98,6 @@ public void testMatchAllDocs() throws IOException { } - @SuppressWarnings("unchecked") private void executeTestCase(Query query, DateHistogramAggregationBuilder aggBuilder, Consumer verify, diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfigTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfigTests.java new file mode 100644 index 0000000000000..ac1c07a40490e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfigTests.java @@ -0,0 +1,38 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.support; + +import org.elasticsearch.script.Script; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class MultiValuesSourceFieldConfigTests extends ESTestCase { + public void testMissingFieldScript() { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MultiValuesSourceFieldConfig.Builder().build()); + assertThat(e.getMessage(), equalTo("[field] and [script] cannot both be null. 
Please specify one or the other.")); + } + + public void testBothFieldScript() { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new MultiValuesSourceFieldConfig.Builder().setFieldName("foo").setScript(new Script("foo")).build()); + assertThat(e.getMessage(), equalTo("[field] and [script] cannot both be configured. Please specify one or the other.")); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index 2126e0e94eb92..31fa4f838dfff 100644 --- a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -96,7 +96,6 @@ protected Collection> nodePlugins() { public static class CustomScriptPlugin extends MockScriptPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); @@ -143,7 +142,6 @@ protected Map, Object>> pluginScripts() { return scripts; } - @SuppressWarnings("unchecked") static Object fieldsScript(Map vars, String fieldName) { Map fields = (Map) vars.get("_fields"); FieldLookup fieldLookup = (FieldLookup) fields.get(fieldName); @@ -156,7 +154,6 @@ static Object sourceScript(Map vars, String path) { return XContentMapValues.extractValue(path, source); } - @SuppressWarnings("unchecked") static Object docScript(Map vars, String fieldName) { Map doc = (Map) vars.get("doc"); ScriptDocValues values = (ScriptDocValues) doc.get(fieldName); diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index 12e48a3ae4f0a..fc11554dfb3fe 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.ScoreAccessor; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.ESIntegTestCase; @@ -48,8 +49,6 @@ import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; - -import org.elasticsearch.script.ScriptType; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -70,7 +69,6 @@ protected Collection> nodePlugins() { public static class CustomScriptPlugin extends MockScriptPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); scripts.put("1", vars -> 1.0d); diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index 257089c90545f..8203dac1a2dcd 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -67,7 
+67,6 @@ protected Collection> nodePlugins() { public static class CustomScriptPlugin extends MockScriptPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); @@ -84,7 +83,6 @@ protected Map, Object>> pluginScripts() { return scripts; } - @SuppressWarnings("unchecked") static Double scoringScript(Map vars, Function scoring) { Map doc = (Map) vars.get("doc"); Double index = ((Number) ((ScriptDocValues) doc.get("index")).getValues().get(0)).doubleValue(); diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java index ff0196aacdf16..40d6b26b4f930 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -86,7 +86,6 @@ public class FieldSortIT extends ESIntegTestCase { public static class CustomScriptPlugin extends MockScriptPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); scripts.put("doc['number'].value", vars -> sortDoubleScript(vars)); @@ -94,14 +93,12 @@ protected Map, Object>> pluginScripts() { return scripts; } - @SuppressWarnings("unchecked") static Double sortDoubleScript(Map vars) { Map doc = (Map) vars.get("doc"); Double index = ((Number) ((ScriptDocValues) doc.get("number")).getValues().get(0)).doubleValue(); return index; } - @SuppressWarnings("unchecked") static String sortStringScript(Map vars) { Map doc = (Map) vars.get("doc"); String value = ((String) ((ScriptDocValues) doc.get("keyword")).getValues().get(0)); diff --git a/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java index aa49bed6975b1..6668c1be0e439 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; import org.elasticsearch.test.ESIntegTestCase; @@ -50,8 +51,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; - -import org.elasticsearch.script.ScriptType; import static org.elasticsearch.search.sort.SortBuilders.scriptSort; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @@ -104,7 +103,6 @@ protected Map, Object>> pluginScripts() { /** * Return the minimal value from a set of values. 
*/ - @SuppressWarnings("unchecked") static > T getMinValueScript(Map vars, T initialValue, String fieldName, Function converter) { T retval = initialValue; diff --git a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/addprinc.sh b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/addprinc.sh index 137135dc2aa4d..d0d1570ae299a 100755 --- a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/addprinc.sh +++ b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/addprinc.sh @@ -20,11 +20,14 @@ set -e if [[ $# -lt 1 ]]; then - echo 'Usage: addprinc.sh ' + echo 'Usage: addprinc.sh principalName [password]' + echo ' principalName user principal name without realm' + echo ' password If provided then will set password for user else it will provision user with keytab' exit 1 fi PRINC="$1" +PASSWD="$2" USER=$(echo $PRINC | tr "/" "_") VDIR=/vagrant @@ -47,12 +50,17 @@ ADMIN_KTAB=$LOCALSTATEDIR/admin.keytab USER_PRIN=$PRINC@$REALM USER_KTAB=$LOCALSTATEDIR/$USER.keytab -if [ -f $USER_KTAB ]; then +if [ -f $USER_KTAB ] && [ -z "$PASSWD" ]; then echo "Principal '${PRINC}@${REALM}' already exists. Re-copying keytab..." + sudo cp $USER_KTAB $KEYTAB_DIR/$USER.keytab else - echo "Provisioning '${PRINC}@${REALM}' principal and keytab..." - sudo kadmin -p $ADMIN_PRIN -kt $ADMIN_KTAB -q "addprinc -randkey $USER_PRIN" - sudo kadmin -p $ADMIN_PRIN -kt $ADMIN_KTAB -q "ktadd -k $USER_KTAB $USER_PRIN" + if [ -z "$PASSWD" ]; then + echo "Provisioning '${PRINC}@${REALM}' principal and keytab..." + sudo kadmin -p $ADMIN_PRIN -kt $ADMIN_KTAB -q "addprinc -randkey $USER_PRIN" + sudo kadmin -p $ADMIN_PRIN -kt $ADMIN_KTAB -q "ktadd -k $USER_KTAB $USER_PRIN" + sudo cp $USER_KTAB $KEYTAB_DIR/$USER.keytab + else + echo "Provisioning '${PRINC}@${REALM}' principal with password..." 
+ sudo kadmin -p $ADMIN_PRIN -kt $ADMIN_KTAB -q "addprinc -pw $PASSWD $PRINC" + fi fi - -sudo cp $USER_KTAB $KEYTAB_DIR/$USER.keytab diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index bd2e2fe8f1999..1efc3eb1c3c4b 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -284,13 +284,14 @@ assert shardRoutings().stream() public synchronized IndexShard addReplicaWithExistingPath(final ShardPath shardPath, final String nodeId) throws IOException { final ShardRouting shardRouting = TestShardRouting.newShardRouting( - shardId, - nodeId, - false, ShardRoutingState.INITIALIZING, - RecoverySource.PeerRecoverySource.INSTANCE); + shardId, + nodeId, + false, ShardRoutingState.INITIALIZING, + RecoverySource.PeerRecoverySource.INSTANCE); final IndexShard newReplica = - newShard(shardRouting, shardPath, indexMetaData, null, getEngineFactory(shardRouting), () -> {}, EMPTY_EVENT_LISTENER); + newShard(shardRouting, shardPath, indexMetaData, null, null, getEngineFactory(shardRouting), + () -> {}, EMPTY_EVENT_LISTENER); replicas.add(newReplica); updateAllocationIDsOnPrimary(); return newReplica; diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 794a90624bf95..23054c4446540 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -156,15 +156,20 @@ public Settings threadPoolSettings() { return Settings.EMPTY; } - private Store createStore(IndexSettings indexSettings, ShardPath shardPath) throws IOException { - final ShardId shardId = shardPath.getShardId(); + + protected Store createStore(IndexSettings indexSettings, ShardPath shardPath) throws IOException { + return createStore(shardPath.getShardId(), indexSettings, newFSDirectory(shardPath.resolveIndex())); + } + + protected Store createStore(ShardId shardId, IndexSettings indexSettings, Directory directory) throws IOException { final DirectoryService directoryService = new DirectoryService(shardId, indexSettings) { @Override public Directory newDirectory() throws IOException { - return newFSDirectory(shardPath.resolveIndex()); + return directory; } }; return new Store(shardId, indexSettings, directoryService, new DummyShardLock(shardId)); + } /** @@ -304,29 +309,32 @@ protected IndexShard newShard(ShardRouting routing, IndexMetaData indexMetaData, final ShardId shardId = routing.shardId(); final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir()); ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId); - return newShard(routing, shardPath, indexMetaData, indexSearcherWrapper, engineFactory, globalCheckpointSyncer, + return newShard(routing, shardPath, indexMetaData, null, indexSearcherWrapper, engineFactory, globalCheckpointSyncer, EMPTY_EVENT_LISTENER, listeners); } /** * creates a new initializing shard. 
- * @param routing shard routing to use - * @param shardPath path to use for shard data - * @param indexMetaData indexMetaData for the shard, including any mapping - * @param indexSearcherWrapper an optional wrapper to be used during searchers - * @param globalCheckpointSyncer callback for syncing global checkpoints - * @param indexEventListener index even listener - * @param listeners an optional set of listeners to add to the shard + * @param routing shard routing to use + * @param shardPath path to use for shard data + * @param indexMetaData indexMetaData for the shard, including any mapping + * @param store an optional custom store to use. If null, a default file-based store will be created + * @param indexSearcherWrapper an optional wrapper to be used during searches + * @param globalCheckpointSyncer callback for syncing global checkpoints + * @param indexEventListener index event listener + * @param listeners an optional set of listeners to add to the shard */ protected IndexShard newShard(ShardRouting routing, ShardPath shardPath, IndexMetaData indexMetaData, - @Nullable IndexSearcherWrapper indexSearcherWrapper, + @Nullable Store store, @Nullable IndexSearcherWrapper indexSearcherWrapper, @Nullable EngineFactory engineFactory, Runnable globalCheckpointSyncer, IndexEventListener indexEventListener, IndexingOperationListener... listeners) throws IOException { final Settings nodeSettings = Settings.builder().put("node.name", routing.currentNodeId()).build(); final IndexSettings indexSettings = new IndexSettings(indexMetaData, nodeSettings); final IndexShard indexShard; - final Store store = createStore(indexSettings, shardPath); + if (store == null) { + store = createStore(indexSettings, shardPath); + } boolean success = false; try { IndexCache indexCache = new IndexCache(indexSettings, new DisabledQueryCache(indexSettings), null); @@ -377,6 +385,7 @@ protected IndexShard reinitShard(IndexShard current, ShardRouting routing, Index current.shardPath(), current.indexSettings().getIndexMetaData(), null, + null, current.engineFactory, current.getGlobalCheckpointSyncer(), EMPTY_EVENT_LISTENER, listeners); diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index 439728bac9ea6..cef44ed17fd0e 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -248,7 +248,7 @@ public void testIndicesDeletedFromRepository() throws Exception { logger.info("--> verify index folder deleted from blob container"); RepositoriesService repositoriesSvc = internalCluster().getInstance(RepositoriesService.class, internalCluster().getMasterName()); ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, internalCluster().getMasterName()); - @SuppressWarnings("unchecked") BlobStoreRepository repository = (BlobStoreRepository) repositoriesSvc.repository(repoName); + BlobStoreRepository repository = (BlobStoreRepository) repositoriesSvc.repository(repoName); final SetOnce<BlobContainer> indicesBlobContainer = new SetOnce<>(); final SetOnce<RepositoryData> repositoryData = new SetOnce<>(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 
2dd2a9b972780..24a48725f105f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -176,6 +176,7 @@ import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; +import java.security.Security; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -2368,4 +2369,7 @@ protected void assertSeqNos() throws Exception { }); } + public static boolean inFipsJvm() { + return Security.getProviders()[0].getName().toLowerCase(Locale.ROOT).contains("fips"); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 5d555ece438fa..44fe621dfa4e4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -29,6 +29,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter; + import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -148,6 +149,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BooleanSupplier; import java.util.function.Consumer; +import java.util.function.IntFunction; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -716,6 +718,20 @@ public static String[] generateRandomStringArray(int maxArraySize, int stringSiz return generateRandomStringArray(maxArraySize, stringSize, allowNull, true); } + public static <T> T[] randomArray(int maxArraySize, IntFunction<T[]> arrayConstructor, Supplier<T> valueConstructor) { + return randomArray(0, maxArraySize, arrayConstructor, valueConstructor); + } + + public static <T> T[] randomArray(int minArraySize, int maxArraySize, IntFunction<T[]> arrayConstructor, Supplier<T> valueConstructor) { + final int size = randomIntBetween(minArraySize, maxArraySize); + final T[] array = arrayConstructor.apply(size); + for (int i = 0; i < array.length; i++) { + array[i] = valueConstructor.get(); + } + return array; + } + + private static final String[] TIME_SUFFIXES = new String[]{"d", "h", "ms", "s", "m", "micros", "nanos"}; public static String randomTimeValue(int lower, int upper, String... 
suffixes) { @@ -1070,7 +1086,6 @@ private static List shuffleList(List list, Set exceptFie List targetList = new ArrayList<>(); for(Object value : list) { if (value instanceof Map) { - @SuppressWarnings("unchecked") LinkedHashMap valueMap = (LinkedHashMap) value; targetList.add(shuffleMap(valueMap, exceptFields)); } else if(value instanceof List) { @@ -1090,7 +1105,6 @@ public static LinkedHashMap shuffleMap(LinkedHashMap valueMap = (LinkedHashMap) value; targetMap.put(key, shuffleMap(valueMap, exceptFields)); } else if(value instanceof List && exceptFields.contains(key) == false) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index cf3cc39d34d88..48fc798207493 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -49,6 +49,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestStatus; @@ -685,7 +687,6 @@ private static void assertMapEquals(Map expected, Map expected, List actual) { assertEquals(expected.size(), actual.size()); Iterator actualIterator = actual.iterator(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 937adddf3a43d..e25e8b43d3a72 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -536,6 +536,11 @@ protected static void createIndex(String name, Settings settings, String mapping client().performRequest(request); } + protected static void deleteIndex(String name) throws IOException { + Request request = new Request("DELETE", "/" + name); + client().performRequest(request); + } + protected static void updateIndexSettings(String index, Settings.Builder settings) throws IOException { updateIndexSettings(index, settings.build()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java index 39a1f1d378067..494d65e05de71 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java @@ -54,7 +54,6 @@ public GreaterThanAssertion(XContentLocation location, String field, Object expe } @Override - @SuppressWarnings("unchecked") protected void doAssert(Object actualValue, Object expectedValue) { logger.trace("assert that [{}] is greater than [{}] (field: [{}])", actualValue, expectedValue, getField()); assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", diff --git a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java 
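Stepping back to the two helpers added above: `randomArray` builds a typed array from a size range, an array constructor, and an element supplier, while `inFipsJvm` lets integration tests skip setups that a FIPS 140 provider forbids. A hedged usage sketch from inside a test method; the keystore message is made up:

```java
// randomArray: 1..5 random strings; String[]::new sizes the array,
// the Supplier produces each element.
String[] values = randomArray(1, 5, String[]::new, () -> randomAlphaOfLength(8));

// inFipsJvm: skip a test that depends on JKS keystores, which a
// FIPS-configured JVM will not load.
assumeFalse("can't run in a FIPS JVM as it requires a JKS keystore", inFipsJvm());
```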
b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java index 84c82f4159dc6..6d1e5116474ff 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java @@ -64,7 +64,7 @@ public int channelsPerNodeConnection() { @Override protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { final MockTcpTransport t = (MockTcpTransport) transport; - @SuppressWarnings("unchecked") final TcpTransport.NodeChannels channels = + final TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } diff --git a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java index cf9eb5d7a8c57..108411dee5b89 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java @@ -96,7 +96,6 @@ protected MockTransportService build(Settings settings, Version version, Cluster @Override protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { - @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 4176cdeb0b7d6..2ae26044be5ab 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -30,7 +30,7 @@ buildRestTests.expectedUnconvertedCandidates = [ ] dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackProject('plugin').path, configuration: 'testArtifacts') } @@ -264,7 +264,7 @@ setups['farequote_index'] = ''' airline: type: keyword doc_count: - type: integer + type: integer ''' setups['farequote_data'] = setups['farequote_index'] + ''' - do: @@ -277,7 +277,7 @@ setups['farequote_data'] = setups['farequote_index'] + ''' {"airline":"JZA","responsetime":990.4628,"time":"2016-02-07T00:00:00+0000", "doc_count": 5} {"index": {"_id":"2"}} {"airline":"JBU","responsetime":877.5927,"time":"2016-02-07T00:00:00+0000", "doc_count": 23} - {"index": {"_id":"3"}} + {"index": {"_id":"3"}} {"airline":"KLM","responsetime":1355.4812,"time":"2016-02-07T00:00:00+0000", "doc_count": 42} ''' setups['farequote_job'] = setups['farequote_data'] + ''' @@ -309,7 +309,7 @@ setups['farequote_datafeed'] = setups['farequote_job'] + ''' "job_id":"farequote", "indexes":"farequote" } -''' +''' setups['server_metrics_index'] = ''' - do: indices.create: diff --git a/x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc b/x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc index 270ad005144ac..f770adf1f0d1c 100644 --- a/x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc +++ b/x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc @@ -33,7 +33,7 @@ live? ==== Request Body -There is no request body for the Get Jobs API. 
+There is no request body for the Get Rollup Caps API. ==== Authorization @@ -179,4 +179,4 @@ GET _xpack/rollup/data/sensor-1 Why is this? The original rollup job was configured against a specific index pattern (`sensor-*`) not a concrete index (`sensor-1`). So while the index belongs to the pattern, the rollup job is only valid across the entirety of the pattern not just one of it's containing indices. So for that reason, the Rollup Capabilities API only returns information based -on the originally configured index name or pattern. \ No newline at end of file +on the originally configured index name or pattern. diff --git a/x-pack/docs/en/rest-api/security/privileges.asciidoc b/x-pack/docs/en/rest-api/security/privileges.asciidoc index 4ec192d633b12..adaf27e97073e 100644 --- a/x-pack/docs/en/rest-api/security/privileges.asciidoc +++ b/x-pack/docs/en/rest-api/security/privileges.asciidoc @@ -84,7 +84,8 @@ The following example output indicates which privileges the "rdeniro" user has: "read" : true, "write" : false } - } + }, + "application" : {} } -------------------------------------------------- // TESTRESPONSE[s/"rdeniro"/"$body.username"/] diff --git a/x-pack/docs/en/rest-api/security/roles.asciidoc b/x-pack/docs/en/rest-api/security/roles.asciidoc index b7b2260a0e426..28c09c560ec3c 100644 --- a/x-pack/docs/en/rest-api/security/roles.asciidoc +++ b/x-pack/docs/en/rest-api/security/roles.asciidoc @@ -140,6 +140,7 @@ role. If the role is not defined in the `native` realm, the request 404s. }, "query" : "{\"match\": {\"title\": \"foo\"}}" } ], + "applications" : [ ], "run_as" : [ "other_user" ], "metadata" : { "version" : 1 diff --git a/x-pack/license-tools/build.gradle b/x-pack/license-tools/build.gradle index 3ef08073bbf84..183b9ab50e03b 100644 --- a/x-pack/license-tools/build.gradle +++ b/x-pack/license-tools/build.gradle @@ -1,7 +1,7 @@ apply plugin: 'elasticsearch.build' dependencies { - compile project(xpackModule('core')) + compile project(path: xpackModule('core'), configuration: 'shadow') compile "org.elasticsearch:elasticsearch:${version}" testCompile "org.elasticsearch.test:framework:${version}" } @@ -17,7 +17,7 @@ task buildZip(type: Zip, dependsOn: jar) { into(parentDir + '/lib') { from jar from configurations.runtime - } + } into(parentDir + '/bin') { from 'bin' } diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index ca529496bf168..b9cd464241fa2 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -104,39 +104,28 @@ integTestRunner { systemProperty 'tests.rest.blacklist', blacklist.join(',') } -// location of generated keystores and certificates +// location for keys and certificates File keystoreDir = new File(project.buildDir, 'keystore') - -// Generate the node's keystore -File nodeKeystore = new File(keystoreDir, 'test-node.jks') -task createNodeKeyStore(type: LoggedExec) { - doFirst { - if (nodeKeystore.parentFile.exists() == false) { - nodeKeystore.parentFile.mkdirs() - } - if (nodeKeystore.exists()) { - delete nodeKeystore +File nodeKey = file("$keystoreDir/testnode.pem") +File nodeCert = file("$keystoreDir/testnode.crt") + +// Add key and certs to test classpath: it expects them there +// Use cert and key PEM files instead of a JKS Keystore for the cluster's trust material so that +// it can run in a FIPS 140 JVM +// TODO: Remove all existing uses of cross project file references when the new approach for referencing static files is available +// https://github.com/elastic/elasticsearch/pull/32201 +task 
copyKeyCerts(type: Copy) { + from(project(':x-pack:plugin:core').file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/')) { + include 'testnode.crt', 'testnode.pem' } - } - executable = new File(project.runtimeJavaHome, 'bin/keytool') - standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8')) - args '-genkey', - '-alias', 'test-node', - '-keystore', nodeKeystore, - '-keyalg', 'RSA', - '-keysize', '2048', - '-validity', '712', - '-dname', 'CN=smoke-test-plugins-ssl', - '-keypass', 'keypass', - '-storepass', 'keypass' + into keystoreDir } - // Add keystores to test classpath: it expects it there sourceSets.test.resources.srcDir(keystoreDir) -processTestResources.dependsOn(createNodeKeyStore) +processTestResources.dependsOn(copyKeyCerts) integTestCluster { - dependsOn createNodeKeyStore + dependsOn copyKeyCerts setting 'xpack.ml.enabled', 'true' setting 'xpack.security.enabled', 'true' setting 'logger.org.elasticsearch.xpack.ml.datafeed', 'TRACE' @@ -145,17 +134,19 @@ integTestCluster { setting 'xpack.monitoring.exporters._local.enabled', 'false' setting 'xpack.security.authc.token.enabled', 'true' setting 'xpack.security.transport.ssl.enabled', 'true' - setting 'xpack.security.transport.ssl.keystore.path', nodeKeystore.name + setting 'xpack.security.transport.ssl.key', nodeKey.name + setting 'xpack.security.transport.ssl.certificate', nodeCert.name setting 'xpack.security.transport.ssl.verification_mode', 'certificate' setting 'xpack.security.audit.enabled', 'true' setting 'xpack.license.self_generated.type', 'trial' keystoreSetting 'bootstrap.password', 'x-pack-test-password' - keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass' + keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' distribution = 'zip' // this is important since we use the reindex module in ML setupCommand 'setupTestUser', 'bin/elasticsearch-users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser' - extraConfigFile nodeKeystore.name, nodeKeystore + extraConfigFile nodeKey.name, nodeKey + extraConfigFile nodeCert.name, nodeCert waitCondition = { NodeInfo node, AntBuilder ant -> File tmpFile = new File(node.cwd, 'wait.success') diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 7ed98ccb5b472..b498246391ded 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -8,6 +8,7 @@ import java.nio.file.StandardCopyOption apply plugin: 'elasticsearch.esplugin' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' +apply plugin: 'com.github.johnrengelman.shadow' archivesBaseName = 'x-pack-core' @@ -27,17 +28,17 @@ dependencyLicenses { dependencies { compileOnly "org.elasticsearch:elasticsearch:${version}" compile project(':x-pack:protocol') - compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" - compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" - compile "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}" - compile "org.apache.httpcomponents:httpasyncclient:${versions.httpasyncclient}" + shadow "org.apache.httpcomponents:httpclient:${versions.httpclient}" + shadow "org.apache.httpcomponents:httpcore:${versions.httpcore}" + shadow "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}" + shadow "org.apache.httpcomponents:httpasyncclient:${versions.httpasyncclient}" - compile 
"commons-logging:commons-logging:${versions.commonslogging}" - compile "commons-codec:commons-codec:${versions.commonscodec}" + shadow "commons-logging:commons-logging:${versions.commonslogging}" + shadow "commons-codec:commons-codec:${versions.commonscodec}" // security deps - compile 'com.unboundid:unboundid-ldapsdk:3.2.0' - compile project(path: ':modules:transport-netty4', configuration: 'runtime') + shadow 'com.unboundid:unboundid-ldapsdk:3.2.0' + shadow project(path: ':modules:transport-netty4', configuration: 'runtime') testCompile 'org.elasticsearch:securemock:1.2' testCompile "org.elasticsearch:mocksocket:${versions.mocksocket}" @@ -107,7 +108,8 @@ test { // TODO: don't publish test artifacts just to run messy tests, fix the tests! // https://github.com/elastic/x-plugins/issues/724 configurations { - testArtifacts.extendsFrom testRuntime + testArtifacts.extendsFrom(testRuntime, shadow) + testArtifacts.exclude(group: project(':x-pack:protocol').group, module: project(':x-pack:protocol').name) } task testJar(type: Jar) { appendix 'test' diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java index d5e38fc0cb808..505d2c55fa652 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java @@ -28,6 +28,8 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.license.LicensesStatus; +import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensesStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensesStatus.java deleted file mode 100644 index 91e0d7239cfa1..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensesStatus.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.license; - -public enum LicensesStatus { - VALID((byte) 0), - INVALID((byte) 1), - EXPIRED((byte) 2); - - private final byte id; - - LicensesStatus(byte id) { - this.id = id; - } - - public int id() { - return id; - } - - public static LicensesStatus fromId(int id) { - if (id == 0) { - return VALID; - } else if (id == 1) { - return INVALID; - } else if (id == 2) { - return EXPIRED; - } else { - throw new IllegalStateException("no valid LicensesStatus for id=" + id); - } - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensingClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensingClient.java index 07979d15ea5ef..14a059e9e014a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensingClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensingClient.java @@ -7,6 +7,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; public class LicensingClient { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java index d93957a9d8bae..497b203f4136d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java @@ -6,6 +6,7 @@ package org.elasticsearch.license; import org.elasticsearch.action.Action; +import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; public class PutLicenseAction extends Action { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseRequestBuilder.java index b7c93d03cd5ff..bc5201a8f1ffd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseRequestBuilder.java @@ -9,6 +9,7 @@ import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; /** * Register license request builder diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseResponse.java deleted file mode 100644 index c85bb068da39e..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseResponse.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.license; - -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -public class PutLicenseResponse extends AcknowledgedResponse { - - private LicensesStatus status; - private Map acknowledgeMessages; - private String acknowledgeHeader; - - PutLicenseResponse() { - } - - public PutLicenseResponse(boolean acknowledged, LicensesStatus status) { - this(acknowledged, status, null, Collections.emptyMap()); - } - - public PutLicenseResponse(boolean acknowledged, LicensesStatus status, String acknowledgeHeader, - Map acknowledgeMessages) { - super(acknowledged); - this.status = status; - this.acknowledgeHeader = acknowledgeHeader; - this.acknowledgeMessages = acknowledgeMessages; - } - - public LicensesStatus status() { - return status; - } - - public Map acknowledgeMessages() { - return acknowledgeMessages; - } - - public String acknowledgeHeader() { - return acknowledgeHeader; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - status = LicensesStatus.fromId(in.readVInt()); - acknowledgeHeader = in.readOptionalString(); - int size = in.readVInt(); - Map acknowledgeMessages = new HashMap<>(size); - for (int i = 0; i < size; i++) { - String feature = in.readString(); - int nMessages = in.readVInt(); - String[] messages = new String[nMessages]; - for (int j = 0; j < nMessages; j++) { - messages[j] = in.readString(); - } - acknowledgeMessages.put(feature, messages); - } - this.acknowledgeMessages = acknowledgeMessages; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVInt(status.id()); - out.writeOptionalString(acknowledgeHeader); - out.writeVInt(acknowledgeMessages.size()); - for (Map.Entry entry : acknowledgeMessages.entrySet()) { - out.writeString(entry.getKey()); - out.writeVInt(entry.getValue().length); - for (String message : entry.getValue()) { - out.writeString(message); - } - } - } - - @Override - protected void addCustomFields(XContentBuilder builder, Params params) throws IOException { - switch (status) { - case VALID: - builder.field("license_status", "valid"); - break; - case INVALID: - builder.field("license_status", "invalid"); - break; - case EXPIRED: - builder.field("license_status", "expired"); - break; - default: - throw new IllegalArgumentException("unknown status [" + status + "] found"); - } - if (!acknowledgeMessages.isEmpty()) { - builder.startObject("acknowledge"); - builder.field("message", acknowledgeHeader); - for (Map.Entry entry : acknowledgeMessages.entrySet()) { - builder.startArray(entry.getKey()); - for (String message : entry.getValue()) { - builder.value(message); - } - builder.endArray(); - } - builder.endObject(); - } - } - - @Override - public String toString() { - return Strings.toString(this, true, true); - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPutLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPutLicenseAction.java index 032d1eb6e86d1..571bfc1413c58 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPutLicenseAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPutLicenseAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index aa60456d8052a..6e57c243630d3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -133,6 +133,8 @@ import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExceptExpression; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.RoleMapperExpression; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; import org.elasticsearch.xpack.core.security.transport.netty4.SecurityNetty4Transport; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.core.ssl.action.GetCertificateInfoAction; @@ -342,6 +344,11 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(ClusterState.Custom.class, TokenMetaData.TYPE, TokenMetaData::new), new NamedWriteableRegistry.Entry(NamedDiff.class, TokenMetaData.TYPE, TokenMetaData::readDiffFrom), new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.SECURITY, SecurityFeatureSetUsage::new), + // security : conditional privileges + new NamedWriteableRegistry.Entry(ConditionalClusterPrivilege.class, + ConditionalClusterPrivileges.ManageApplicationPrivileges.WRITEABLE_NAME, + ConditionalClusterPrivileges.ManageApplicationPrivileges::createFrom), + // security : role-mappings new NamedWriteableRegistry.Entry(RoleMapperExpression.class, AllExpression.NAME, AllExpression::new), new NamedWriteableRegistry.Entry(RoleMapperExpression.class, AnyExpression.NAME, AnyExpression::new), new NamedWriteableRegistry.Entry(RoleMapperExpression.class, FieldExpression.NAME, FieldExpression::new), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index d14c72383d6a8..aaa3effcfe8bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -52,7 +52,7 @@ import org.elasticsearch.xpack.core.action.TransportXPackUsageAction; import org.elasticsearch.xpack.core.action.XPackInfoAction; import org.elasticsearch.xpack.core.action.XPackUsageAction; -import org.elasticsearch.xpack.core.ml.MLMetadataField; +import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.rest.action.RestXPackInfoAction; import org.elasticsearch.xpack.core.rest.action.RestXPackUsageAction; import org.elasticsearch.xpack.core.security.authc.TokenMetaData; @@ -197,7 +197,7 @@ public static List nodesNotReadyForXPackCustomMetadata(ClusterSta private static boolean 
alreadyContainsXPackCustomMetadata(ClusterState clusterState) { final MetaData metaData = clusterState.metaData(); return metaData.custom(LicensesMetaData.TYPE) != null || - metaData.custom(MLMetadataField.TYPE) != null || + metaData.custom(MlMetadata.TYPE) != null || metaData.custom(WatcherMetaData.TYPE) != null || clusterState.custom(TokenMetaData.TYPE) != null; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MLMetadataField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MLMetadataField.java deleted file mode 100644 index bef1b57902db1..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MLMetadataField.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.ml; - -public final class MLMetadataField { - - public static final String TYPE = "ml"; - - private MLMetadataField() {} - - /** - * Namespaces the task ids for datafeeds. - * A job id can be used as a datafeed id, because they are stored separately in cluster state. - */ - public static String datafeedTaskId(String datafeedId) { - return "datafeed-" + datafeedId; - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java index 85e5c99fe3581..f35058359947f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java @@ -55,6 +55,7 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom { + public static final String TYPE = "ml"; private static final ParseField JOBS_FIELD = new ParseField("jobs"); private static final ParseField DATAFEEDS_FIELD = new ParseField("datafeeds"); @@ -119,7 +120,7 @@ public Version getMinimalSupportedVersion() { @Override public String getWriteableName() { - return MLMetadataField.TYPE; + return TYPE; } @Override @@ -213,7 +214,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public String getWriteableName() { - return MLMetadataField.TYPE; + return TYPE; } static Diff readJobDiffFrom(StreamInput in) throws IOException { @@ -277,7 +278,7 @@ public Builder putJob(Job job, boolean overwrite) { public Builder deleteJob(String jobId, PersistentTasksCustomMetaData tasks) { checkJobHasNoDatafeed(jobId); - JobState jobState = MlMetadata.getJobState(jobId, tasks); + JobState jobState = MlTasks.getJobState(jobId, tasks); if (jobState.isAnyOf(JobState.CLOSED, JobState.FAILED) == false) { throw ExceptionsHelper.conflictStatusException("Unexpected job state [" + jobState + "], expected [" + JobState.CLOSED + " or " + JobState.FAILED + "]"); @@ -362,7 +363,7 @@ private Optional getDatafeedByJobId(String jobId) { private void checkDatafeedIsStopped(Supplier msg, String datafeedId, PersistentTasksCustomMetaData persistentTasks) { if (persistentTasks != null) { - if (persistentTasks.getTask(MLMetadataField.datafeedTaskId(datafeedId)) != null) { + if (persistentTasks.getTask(MlTasks.datafeedTaskId(datafeedId)) != null) { throw ExceptionsHelper.conflictStatusException(msg.get()); } } @@ -399,7 +400,7 @@ public void markJobAsDeleted(String jobId, PersistentTasksCustomMetaData tasks, 
checkJobHasNoDatafeed(jobId); if (allowDeleteOpenJob == false) { - PersistentTask jobTask = getJobTask(jobId, tasks); + PersistentTask jobTask = MlTasks.getJobTask(jobId, tasks); if (jobTask != null) { JobTaskState jobTaskState = (JobTaskState) jobTask.getState(); throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] because the job is " @@ -420,56 +421,10 @@ void checkJobHasNoDatafeed(String jobId) { } } - /** - * Namespaces the task ids for jobs. - * A datafeed id can be used as a job id, because they are stored separately in cluster state. - */ - public static String jobTaskId(String jobId) { - return "job-" + jobId; - } - - @Nullable - public static PersistentTask getJobTask(String jobId, @Nullable PersistentTasksCustomMetaData tasks) { - if (tasks == null) { - return null; - } - return tasks.getTask(jobTaskId(jobId)); - } - - @Nullable - public static PersistentTask getDatafeedTask(String datafeedId, @Nullable PersistentTasksCustomMetaData tasks) { - if (tasks == null) { - return null; - } - return tasks.getTask(MLMetadataField.datafeedTaskId(datafeedId)); - } - public static JobState getJobState(String jobId, @Nullable PersistentTasksCustomMetaData tasks) { - PersistentTask task = getJobTask(jobId, tasks); - if (task != null) { - JobTaskState jobTaskState = (JobTaskState) task.getState(); - if (jobTaskState == null) { - return JobState.OPENING; - } - return jobTaskState.getState(); - } - // If we haven't opened a job than there will be no persistent task, which is the same as if the job was closed - return JobState.CLOSED; - } - - public static DatafeedState getDatafeedState(String datafeedId, @Nullable PersistentTasksCustomMetaData tasks) { - PersistentTask task = getDatafeedTask(datafeedId, tasks); - if (task != null && task.getState() != null) { - return (DatafeedState) task.getState(); - } else { - // If we haven't started a datafeed then there will be no persistent task, - // which is the same as if the datafeed was't started - return DatafeedState.STOPPED; - } - } public static MlMetadata getMlMetadata(ClusterState state) { - MlMetadata mlMetadata = (state == null) ? null : state.getMetaData().custom(MLMetadataField.TYPE); + MlMetadata mlMetadata = (state == null) ? null : state.getMetaData().custom(TYPE); if (mlMetadata == null) { return EMPTY_METADATA; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java new file mode 100644 index 0000000000000..5c17271738e32 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.ml; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState; +import org.elasticsearch.xpack.core.ml.job.config.JobState; +import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; + +public final class MlTasks { + + private MlTasks() { + } + + /** + * Namespaces the task ids for jobs. + * A datafeed id can be used as a job id, because they are stored separately in cluster state. 
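(The javadoc and method bodies continue just below.) To make the namespacing concrete, a tiny illustration; the "farequote" id is invented, and the prefixes are exactly those returned by the two methods:

```java
// A job and a datafeed may share the raw id "farequote", yet their
// persistent-task ids can never collide because of the prefixes.
String id = "farequote";
assert MlTasks.jobTaskId(id).equals("job-farequote");
assert MlTasks.datafeedTaskId(id).equals("datafeed-farequote");
assert MlTasks.jobTaskId(id).equals(MlTasks.datafeedTaskId(id)) == false;
```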
+ */ + public static String jobTaskId(String jobId) { + return "job-" + jobId; + } + + /** + * Namespaces the task ids for datafeeds. + * A job id can be used as a datafeed id, because they are stored separately in cluster state. + */ + public static String datafeedTaskId(String datafeedId) { + return "datafeed-" + datafeedId; + } + + @Nullable + public static PersistentTasksCustomMetaData.PersistentTask<?> getJobTask(String jobId, @Nullable PersistentTasksCustomMetaData tasks) { + return tasks == null ? null : tasks.getTask(jobTaskId(jobId)); + } + + @Nullable + public static PersistentTasksCustomMetaData.PersistentTask<?> getDatafeedTask(String datafeedId, + @Nullable PersistentTasksCustomMetaData tasks) { + return tasks == null ? null : tasks.getTask(datafeedTaskId(datafeedId)); + } + + public static JobState getJobState(String jobId, @Nullable PersistentTasksCustomMetaData tasks) { + PersistentTasksCustomMetaData.PersistentTask<?> task = getJobTask(jobId, tasks); + if (task != null) { + JobTaskState jobTaskState = (JobTaskState) task.getState(); + if (jobTaskState == null) { + return JobState.OPENING; + } + return jobTaskState.getState(); + } + // If we haven't opened a job then there will be no persistent task, which is the same as if the job was closed + return JobState.CLOSED; + } + + public static DatafeedState getDatafeedState(String datafeedId, @Nullable PersistentTasksCustomMetaData tasks) { + PersistentTasksCustomMetaData.PersistentTask<?> task = getDatafeedTask(datafeedId, tasks); + if (task != null && task.getState() != null) { + return (DatafeedState) task.getState(); + } else { + // If we haven't started a datafeed then there will be no persistent task, + // which is the same as if the datafeed wasn't started + return DatafeedState.STOPPED; + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java index 451679f364600..48cef12f01c43 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java @@ -19,7 +19,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.tasks.Task; -import org.elasticsearch.xpack.core.ml.MLMetadataField; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -84,11 +84,8 @@ public String getDatafeedId() { @Override public boolean match(Task task) { - String expectedDescription = MLMetadataField.datafeedTaskId(datafeedId); - if (task instanceof StartDatafeedAction.DatafeedTaskMatcher && expectedDescription.equals(task.getDescription())){ - return true; - } - return false; + String expectedDescription = MlTasks.datafeedTaskId(datafeedId); + return task instanceof StartDatafeedAction.DatafeedTaskMatcher && expectedDescription.equals(task.getDescription()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java index 0117225141085..c802f0bcccaa5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.tasks.Task; -import org.elasticsearch.xpack.core.ml.MLMetadataField; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -125,7 +125,7 @@ public void setAllowNoDatafeeds(boolean allowNoDatafeeds) { @Override public boolean match(Task task) { for (String id : resolvedStartedDatafeedIds) { - String expectedDescription = MLMetadataField.datafeedTaskId(id); + String expectedDescription = MlTasks.datafeedTaskId(id); if (task instanceof StartDatafeedAction.DatafeedTaskMatcher && expectedDescription.equals(task.getDescription())){ return true; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/ApplicationPrivilegesRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/ApplicationPrivilegesRequest.java new file mode 100644 index 0000000000000..3d7c765936112 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/ApplicationPrivilegesRequest.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.security.action.privilege; + +import java.util.Collection; + +/** + * Interface implemented by all Requests that manage application privileges + */ +public interface ApplicationPrivilegesRequest { + + Collection getApplicationNames(); +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesAction.java new file mode 100644 index 0000000000000..a36d2fdec74c4 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesAction.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.action.privilege; + +import org.elasticsearch.action.Action; + +/** + * Action for deleting application privileges. 
+ */ +public final class DeletePrivilegesAction extends Action { + + public static final DeletePrivilegesAction INSTANCE = new DeletePrivilegesAction(); + public static final String NAME = "cluster:admin/xpack/security/privilege/delete"; + + private DeletePrivilegesAction() { + super(NAME); + } + + @Override + public DeletePrivilegesResponse newResponse() { + return new DeletePrivilegesResponse(); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesRequest.java new file mode 100644 index 0000000000000..d5ed78a482315 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesRequest.java @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.action.privilege; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +/** + * A request to delete an application privilege. + */ +public final class DeletePrivilegesRequest extends ActionRequest + implements ApplicationPrivilegesRequest, WriteRequest { + + private String application; + private String[] privileges; + private RefreshPolicy refreshPolicy = RefreshPolicy.IMMEDIATE; + + public DeletePrivilegesRequest() { + this(null, Strings.EMPTY_ARRAY); + } + + public DeletePrivilegesRequest(String application, String[] privileges) { + this.application = application; + this.privileges = privileges; + } + + @Override + public DeletePrivilegesRequest setRefreshPolicy(RefreshPolicy refreshPolicy) { + this.refreshPolicy = refreshPolicy; + return this; + } + + @Override + public RefreshPolicy getRefreshPolicy() { + return refreshPolicy; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (Strings.isNullOrEmpty(application)) { + validationException = addValidationError("application name is missing", validationException); + } + if (privileges == null || privileges.length == 0 || Arrays.stream(privileges).allMatch(Strings::isNullOrEmpty)) { + validationException = addValidationError("privileges are missing", validationException); + } + return validationException; + } + + public void application(String application) { + this.application = application; + } + + public String application() { + return application; + } + + @Override + public Collection getApplicationNames() { + return Collections.singleton(application); + } + + public String[] privileges() { + return this.privileges; + } + + public void privileges(String[] privileges) { + this.privileges = privileges; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + application = in.readString(); + privileges = 
in.readStringArray(); + refreshPolicy = RefreshPolicy.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(application); + out.writeStringArray(privileges); + refreshPolicy.writeTo(out); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesRequestBuilder.java new file mode 100644 index 0000000000000..c1d364476ea3e --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesRequestBuilder.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.action.privilege; + +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.support.WriteRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; + +/** + * Builder for {@link DeletePrivilegesRequest} + */ +public final class DeletePrivilegesRequestBuilder extends ActionRequestBuilder + implements WriteRequestBuilder { + + public DeletePrivilegesRequestBuilder(ElasticsearchClient client, DeletePrivilegesAction action) { + super(client, action, new DeletePrivilegesRequest()); + } + + public DeletePrivilegesRequestBuilder privileges(String[] privileges) { + request.privileges(privileges); + return this; + } + + public DeletePrivilegesRequestBuilder application(String applicationName) { + request.application(applicationName); + return this; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesResponse.java new file mode 100644 index 0000000000000..18efb2ac5fac3 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesResponse.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.action.privilege; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +/** + * Response when deleting application privileges. + * Returns a collection of privileges that were successfully found and deleted. 
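(DeletePrivilegesResponse continues below.) Putting the new request, builder, and response together, a hedged usage sketch; `client` is any `ElasticsearchClient`, and the application and privilege names are invented:

```java
DeletePrivilegesResponse response = new DeletePrivilegesRequestBuilder(client, DeletePrivilegesAction.INSTANCE)
        .application("myapp")
        .privileges(new String[] { "read", "write" })
        .get();
// validate() would have rejected the request up front if the application
// name were empty or every privilege name were blank.
System.out.println("privileges actually deleted: " + response.found());
```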
+ */ +public final class DeletePrivilegesResponse extends ActionResponse implements ToXContentObject { + + private Set found; + + public DeletePrivilegesResponse() { + } + + public DeletePrivilegesResponse(Collection found) { + this.found = Collections.unmodifiableSet(new HashSet<>(found)); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject().field("found", found).endObject(); + return builder; + } + + public Set found() { + return this.found; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + this.found = Collections.unmodifiableSet(in.readSet(StreamInput::readString)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeCollection(found, StreamOutput::writeString); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesAction.java new file mode 100644 index 0000000000000..0b8743228c523 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesAction.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.action.privilege; + +import org.elasticsearch.action.Action; + +/** + * Action for retrieving one or more application privileges from the security index + */ +public final class GetPrivilegesAction extends Action { + + public static final GetPrivilegesAction INSTANCE = new GetPrivilegesAction(); + public static final String NAME = "cluster:admin/xpack/security/privilege/get"; + + private GetPrivilegesAction() { + super(NAME); + } + + @Override + public GetPrivilegesResponse newResponse() { + return new GetPrivilegesResponse(); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequest.java new file mode 100644 index 0000000000000..559e0ab8d9877 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequest.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.action.privilege; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +/** + * Request to retrieve one or more application privileges. 
+ */ +public final class GetPrivilegesRequest extends ActionRequest implements ApplicationPrivilegesRequest { + + @Nullable + private String application; + private String[] privileges; + + public GetPrivilegesRequest() { + privileges = Strings.EMPTY_ARRAY; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (privileges == null) { + validationException = addValidationError("privileges cannot be null", validationException); + } + return validationException; + } + + public void application(String application) { + this.application = application; + } + + public String application() { + return this.application; + } + + @Override + public Collection getApplicationNames() { + return Collections.singleton(application); + } + + public void privileges(String... privileges) { + this.privileges = privileges; + } + + public String[] privileges() { + return this.privileges; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + application = in.readOptionalString(); + privileges = in.readStringArray(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalString(application); + out.writeStringArray(privileges); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequestBuilder.java new file mode 100644 index 0000000000000..305c8d1ff7946 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequestBuilder.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.action.privilege; + +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; + +/** + * Builder for {@link GetPrivilegesRequest} + */ +public final class GetPrivilegesRequestBuilder extends ActionRequestBuilder { + + public GetPrivilegesRequestBuilder(ElasticsearchClient client, GetPrivilegesAction action) { + super(client, action, new GetPrivilegesRequest()); + } + + public GetPrivilegesRequestBuilder privileges(String... privileges) { + request.privileges(privileges); + return this; + } + + public GetPrivilegesRequestBuilder application(String applicationName) { + request.application(applicationName); + return this; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java new file mode 100644 index 0000000000000..664673aa97e41 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponse.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
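(GetPrivilegesResponse continues below.) The matching retrieval side, again as a hedged sketch with an invented application name; omitting `privileges(...)` asks for every privilege registered under the application:

```java
GetPrivilegesResponse response = new GetPrivilegesRequestBuilder(client, GetPrivilegesAction.INSTANCE)
        .application("myapp")
        .privileges("read", "write")
        .get();
for (ApplicationPrivilegeDescriptor descriptor : response.privileges()) {
    System.out.println(descriptor.getApplication() + "/" + descriptor.getName());
}
```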
+ */
+package org.elasticsearch.xpack.core.security.action.privilege;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor;
+
+import java.io.IOException;
+import java.util.Collection;
+
+/**
+ * Response containing one or more application privileges retrieved from the security index
+ */
+public final class GetPrivilegesResponse extends ActionResponse {
+
+    private ApplicationPrivilegeDescriptor[] privileges;
+
+    public GetPrivilegesResponse(ApplicationPrivilegeDescriptor... privileges) {
+        this.privileges = privileges;
+    }
+
+    public GetPrivilegesResponse(Collection<ApplicationPrivilegeDescriptor> privileges) {
+        this(privileges.toArray(new ApplicationPrivilegeDescriptor[privileges.size()]));
+    }
+
+    public ApplicationPrivilegeDescriptor[] privileges() {
+        return privileges;
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        this.privileges = in.readArray(ApplicationPrivilegeDescriptor::new, ApplicationPrivilegeDescriptor[]::new);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeArray(privileges);
+    }
+
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesAction.java
new file mode 100644
index 0000000000000..3743bec144f29
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesAction.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.security.action.privilege;
+
+import org.elasticsearch.action.Action;
+
+/**
+ * Action for putting (adding/updating) one or more application privileges.
+ */
+public final class PutPrivilegesAction extends Action<PutPrivilegesResponse> {
+
+    public static final PutPrivilegesAction INSTANCE = new PutPrivilegesAction();
+    public static final String NAME = "cluster:admin/xpack/security/privilege/put";
+
+    private PutPrivilegesAction() {
+        super(NAME);
+    }
+
+    @Override
+    public PutPrivilegesResponse newResponse() {
+        return new PutPrivilegesResponse();
+    }
+}
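For orientation: `NAME` is the identifier the authorization layer and audit logging see, and `INSTANCE` is what gets wired into the action registry. A hypothetical wiring sketch follows; the transport handler classes are not part of this excerpt and their names are assumed:

```java
// Hypothetical plugin wiring (e.g. in a getActions() implementation);
// TransportGetPrivilegesAction / TransportPutPrivilegesAction are assumed names.
actions.add(new ActionHandler<>(GetPrivilegesAction.INSTANCE, TransportGetPrivilegesAction.class));
actions.add(new ActionHandler<>(PutPrivilegesAction.INSTANCE, TransportPutPrivilegesAction.class));
```

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequest.java
new file mode 100644
index 0000000000000..beba805f6df2f
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequest.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.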
+ */
+package org.elasticsearch.xpack.core.security.action.privilege;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege;
+import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor;
+import org.elasticsearch.xpack.core.security.support.MetadataUtils;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.elasticsearch.action.ValidateActions.addValidationError;
+
+/**
+ * Request object to put one or more application privileges.
+ */
+public final class PutPrivilegesRequest extends ActionRequest implements ApplicationPrivilegesRequest, WriteRequest<PutPrivilegesRequest> {
+
+    private List<ApplicationPrivilegeDescriptor> privileges;
+    private RefreshPolicy refreshPolicy = RefreshPolicy.IMMEDIATE;
+
+    public PutPrivilegesRequest() {
+        privileges = Collections.emptyList();
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        ActionRequestValidationException validationException = null;
+        for (ApplicationPrivilegeDescriptor privilege : privileges) {
+            try {
+                ApplicationPrivilege.validateApplicationName(privilege.getApplication());
+            } catch (IllegalArgumentException e) {
+                validationException = addValidationError(e.getMessage(), validationException);
+            }
+            try {
+                ApplicationPrivilege.validatePrivilegeName(privilege.getName());
+            } catch (IllegalArgumentException e) {
+                validationException = addValidationError(e.getMessage(), validationException);
+            }
+            if (privilege.getActions().isEmpty()) {
+                validationException = addValidationError("Application privileges must have at least one action", validationException);
+            }
+            for (String action : privilege.getActions()) {
+                if (action.indexOf('/') == -1 && action.indexOf('*') == -1 && action.indexOf(':') == -1) {
+                    validationException = addValidationError("action [" + action + "] must contain one of [ '/' , '*' , ':' ]",
+                        validationException);
+                }
+                try {
+                    ApplicationPrivilege.validatePrivilegeOrActionName(action);
+                } catch (IllegalArgumentException e) {
+                    validationException = addValidationError(e.getMessage(), validationException);
+                }
+            }
+            if (MetadataUtils.containsReservedMetadata(privilege.getMetadata())) {
+                validationException = addValidationError("metadata keys may not start with [" + MetadataUtils.RESERVED_PREFIX
+                    + "] (in privilege " + privilege.getApplication() + ' ' + privilege.getName() + ")", validationException);
+            }
+        }
+        return validationException;
+    }
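The action-format rule above (every action must contain at least one of '/', '*', ':') is easiest to see with a concrete descriptor. A hedged sketch; the `ApplicationPrivilegeDescriptor` constructor shape is assumed from the getters used in validate() and is illustrative only:

```java
import org.elasticsearch.common.util.set.Sets;
import java.util.Collections;

// Assumed constructor order (application, name, actions, metadata),
// inferred from getApplication()/getName()/getActions()/getMetadata() above.
ApplicationPrivilegeDescriptor descriptor = new ApplicationPrivilegeDescriptor(
    "myapp", "read", Sets.newHashSet("data:read/*", "action:login"), Collections.emptyMap());

PutPrivilegesRequest request = new PutPrivilegesRequest();
request.setPrivileges(Collections.singleton(descriptor));
assert request.validate() == null; // "data:read/*" and "action:login" both pass
// An action named just "read" would be rejected: it contains none of '/', '*', ':'.
```

+
+    /**
+     * Should this request trigger a refresh ({@linkplain RefreshPolicy#IMMEDIATE}, the default), wait for a refresh (
+     * {@linkplain RefreshPolicy#WAIT_UNTIL}), or proceed and ignore refreshes entirely ({@linkplain RefreshPolicy#NONE}).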
+     */
+    @Override
+    public RefreshPolicy getRefreshPolicy() {
+        return refreshPolicy;
+    }
+
+    @Override
+    public PutPrivilegesRequest setRefreshPolicy(RefreshPolicy refreshPolicy) {
+        this.refreshPolicy = refreshPolicy;
+        return this;
+    }
+
+    public List<ApplicationPrivilegeDescriptor> getPrivileges() {
+        return privileges;
+    }
+
+    public void setPrivileges(Collection<ApplicationPrivilegeDescriptor> privileges) {
+        this.privileges = Collections.unmodifiableList(new ArrayList<>(privileges));
+    }
+
+    @Override
+    public Collection<String> getApplicationNames() {
+        return Collections.unmodifiableSet(privileges.stream()
+            .map(ApplicationPrivilegeDescriptor::getApplication)
+            .collect(Collectors.toSet()));
+    }
+
+    @Override
+    public String toString() {
+        return getClass().getSimpleName() + "{[" + privileges.stream().map(Strings::toString).collect(Collectors.joining(","))
+            + "];" + refreshPolicy + "}";
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        privileges = Collections.unmodifiableList(in.readList(ApplicationPrivilegeDescriptor::new));
+        refreshPolicy = RefreshPolicy.readFrom(in);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeList(privileges);
+        refreshPolicy.writeTo(out);
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilder.java
new file mode 100644
index 0000000000000..b8c2685d28a11
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilder.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.security.action.privilege;
+
+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.action.support.WriteRequestBuilder;
+import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Request builder for {@link PutPrivilegesRequest}
+ */
+public final class PutPrivilegesRequestBuilder extends ActionRequestBuilder<PutPrivilegesRequest, PutPrivilegesResponse>
+    implements WriteRequestBuilder<PutPrivilegesRequestBuilder> {
+
+    public PutPrivilegesRequestBuilder(ElasticsearchClient client, PutPrivilegesAction action) {
+        super(client, action, new PutPrivilegesRequest());
+    }
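A hedged sketch of the single-privilege overload in use. The `client` variable and the body are illustrative; the exact privilege schema is defined by `ApplicationPrivilegeDescriptor.parse`, which is outside this excerpt:

```java
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;

// Application and privilege name arrive separately (e.g. from the REST path),
// so the body holds just the privilege definition itself.
String body = "{ \"actions\": [ \"data:read/*\" ], \"metadata\": { \"description\": \"read access\" } }";
new PutPrivilegesRequestBuilder(client, PutPrivilegesAction.INSTANCE)
    .source("myapp", "read", new BytesArray(body), XContentType.JSON)
    .get();
```

+
+    /**
+     * Populate the put privileges request using the given source, application name and privilege name
+     * The source must contain a single privilege object which matches the application and privilege names.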
+     */
+    public PutPrivilegesRequestBuilder source(String applicationName, String expectedName,
+                                              BytesReference source, XContentType xContentType)
+        throws IOException {
+        Objects.requireNonNull(xContentType);
+        // EMPTY is ok here because we never call namedObject
+        try (InputStream stream = source.streamInput();
+             XContentParser parser = xContentType.xContent()
+                 .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
+            XContentParser.Token token = parser.currentToken();
+            if (token == null) {
+                token = parser.nextToken();
+            }
+            if (token == XContentParser.Token.START_OBJECT) {
+                final ApplicationPrivilegeDescriptor privilege = parsePrivilege(parser, applicationName, expectedName);
+                this.request.setPrivileges(Collections.singleton(privilege));
+            } else {
+                throw new ElasticsearchParseException("expected an object but found {} instead", token);
+            }
+        }
+        return this;
+    }
+
+    ApplicationPrivilegeDescriptor parsePrivilege(XContentParser parser, String applicationName, String privilegeName) throws IOException {
+        ApplicationPrivilegeDescriptor privilege = ApplicationPrivilegeDescriptor.parse(parser, applicationName, privilegeName, false);
+        checkPrivilegeName(privilege, applicationName, privilegeName);
+        return privilege;
+    }
+
+    /**
+     * Populate the put privileges request using the given source, application name and privilege name
+     * The source must contain a top-level object, keyed by application name.
+     * The value for each application-name, is an object keyed by privilege name.
+     * The value for each privilege-name is a privilege object which must match the application and privilege names in which it is nested.
+     */
+    public PutPrivilegesRequestBuilder source(BytesReference source, XContentType xContentType)
+        throws IOException {
+        Objects.requireNonNull(xContentType);
+        // EMPTY is ok here because we never call namedObject
+        try (InputStream stream = source.streamInput();
+             XContentParser parser = xContentType.xContent()
+                 .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
+            XContentParser.Token token = parser.currentToken();
+            if (token == null) {
+                token = parser.nextToken();
+            }
+            if (token != XContentParser.Token.START_OBJECT) {
+                throw new ElasticsearchParseException("expected object but found {} instead", token);
+            }
+
+            List<ApplicationPrivilegeDescriptor> privileges = new ArrayList<>();
+            while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
+                token = parser.currentToken();
+                assert token == XContentParser.Token.FIELD_NAME : "Invalid token " + token;
+                final String applicationName = parser.currentName();
+
+                token = parser.nextToken();
+                if (token != XContentParser.Token.START_OBJECT) {
+                    throw new ElasticsearchParseException("expected the value for {} to be an object, but found {} instead",
+                        applicationName, token);
+                }
+
+                while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
+                    token = parser.currentToken();
+                    assert (token == XContentParser.Token.FIELD_NAME);
+                    final String privilegeName = parser.currentName();
+
+                    token = parser.nextToken();
+                    if (token != XContentParser.Token.START_OBJECT) {
+                        throw new ElasticsearchParseException("expected the value for {} to be an object, but found {} instead",
+                            applicationName, token);
+                    }
+                    privileges.add(parsePrivilege(parser, applicationName, privilegeName));
+                }
+            }
+            request.setPrivileges(privileges);
+        }
+        return this;
+    }
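And the bulk overload, which nests one privilege object per privilege name under each application name, exactly as the javadoc above describes. The payload and `client` are illustrative:

```java
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;

// Top-level keys are application names; the next level holds privilege names.
String body =
      "{ \"myapp\": {"
    + "    \"read\":  { \"actions\": [ \"data:read/*\" ] },"
    + "    \"write\": { \"actions\": [ \"data:write/*\" ] }"
    + "  },"
    + "  \"otherapp\": {"
    + "    \"admin\": { \"actions\": [ \"admin:*\" ] }"
    + "  } }";
new PutPrivilegesRequestBuilder(client, PutPrivilegesAction.INSTANCE)
    .source(new BytesArray(body), XContentType.JSON)
    .get();
```

+
+    private void checkPrivilegeName(ApplicationPrivilegeDescriptor privilege, String applicationName, String providedName) {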
+        final String privilegeName = privilege.getName();
+        if (Strings.isNullOrEmpty(applicationName) == false && applicationName.equals(privilege.getApplication()) == false) {
+            throw new IllegalArgumentException("privilege application [" + privilege.getApplication()
+                + "] in source does not match the provided application [" + applicationName + "]");
+        }
+        if (Strings.isNullOrEmpty(providedName) == false && providedName.equals(privilegeName) == false) {
+            throw new IllegalArgumentException("privilege name [" + privilegeName
+                + "] in source does not match the provided name [" + providedName + "]");
+        }
+    }
+}
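The response type below distinguishes created from updated privileges. A hedged sketch of what that looks like to a caller, with the logging purely illustrative:

```java
// The x-content form is e.g. { "created": { "myapp": [ "read", "write" ] } }:
// application name mapped to the privilege names that did not previously exist.
static void logCreated(PutPrivilegesResponse response) {
    response.created().forEach((app, names) ->
        names.forEach(name -> System.out.println(app + "/" + name + " was created")));
}
```

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesResponse.java
new file mode 100644
index 0000000000000..6d4a3f1ad44d0
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesResponse.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.security.action.privilege;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Response when adding one or more application privileges to the security index.
+ * Returns a collection of the privileges that were created (by implication, any other privileges were updated).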
+ */
+public final class PutPrivilegesResponse extends ActionResponse implements ToXContentObject {
+
+    private Map<String, List<String>> created;
+
+    PutPrivilegesResponse() {
+        this(Collections.emptyMap());
+    }
+
+    public PutPrivilegesResponse(Map<String, List<String>> created) {
+        this.created = Collections.unmodifiableMap(created);
+    }
+
+    /**
+     * Get a list of privileges that were created (as opposed to updated)
+     * @return A map from Application Name to a {@code List<String>} of privilege names
+     */
+    public Map<String, List<String>> created() {
+        return created;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject().field("created", created).endObject();
+        return builder;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeMap(created, StreamOutput::writeString, StreamOutput::writeStringList);
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        this.created = Collections.unmodifiableMap(in.readMap(StreamInput::readString, si -> si.readList(StreamInput::readString)));
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java
index d0f3423fdcfe0..96c9c817182ff 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java
@@ -5,6 +5,7 @@
  */
 package org.elasticsearch.xpack.core.security.action.role;
 
+import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.WriteRequest;
@@ -14,11 +15,15 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
+import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege;
+import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege;
+import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges;
 import org.elasticsearch.xpack.core.security.support.MetadataUtils;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
@@ -31,11 +36,13 @@ public class PutRoleRequest extends ActionRequest implements WriteRequest<PutRoleRequest> {
 
     private String name;
     private String[] clusterPrivileges = Strings.EMPTY_ARRAY;
+    private ConditionalClusterPrivilege[] conditionalClusterPrivileges = ConditionalClusterPrivileges.EMPTY_ARRAY;
     private List<RoleDescriptor.IndicesPrivileges> indicesPrivileges = new ArrayList<>();
+    private List<RoleDescriptor.ApplicationResourcePrivileges> applicationPrivileges = new ArrayList<>();
     private String[] runAs = Strings.EMPTY_ARRAY;
     private RefreshPolicy refreshPolicy = RefreshPolicy.IMMEDIATE;
     private Map<String, Object> metadata;
-
+
     public PutRoleRequest() {
     }
 
@@ -45,9 +52,25 @@ public ActionRequestValidationException validate() {
         if (name == null) {
             validationException = addValidationError("role name is missing", validationException);
         }
+        if (applicationPrivileges != null) {
+            for (RoleDescriptor.ApplicationResourcePrivileges privilege : applicationPrivileges) {
+                try {
+                    ApplicationPrivilege.validateApplicationNameOrWildcard(privilege.getApplication());
+                } catch (IllegalArgumentException e) {
+                    validationException = addValidationError(e.getMessage(), validationException);
+                }
+                for (String name : privilege.getPrivileges()) {
+                    try {
ApplicationPrivilege.validatePrivilegeOrActionName(name); + } catch (IllegalArgumentException e) { + validationException = addValidationError(e.getMessage(), validationException); + } + } + } + } if (metadata != null && MetadataUtils.containsReservedMetadata(metadata)) { validationException = - addValidationError("metadata keys may not start with [" + MetadataUtils.RESERVED_PREFIX + "]", validationException); + addValidationError("metadata keys may not start with [" + MetadataUtils.RESERVED_PREFIX + "]", validationException); } return validationException; } @@ -60,6 +83,10 @@ public void cluster(String... clusterPrivileges) { this.clusterPrivileges = clusterPrivileges; } + void conditionalCluster(ConditionalClusterPrivilege... conditionalClusterPrivileges) { + this.conditionalClusterPrivileges = conditionalClusterPrivileges; + } + void addIndex(RoleDescriptor.IndicesPrivileges... privileges) { this.indicesPrivileges.addAll(Arrays.asList(privileges)); } @@ -75,6 +102,10 @@ public void addIndex(String[] indices, String[] privileges, String[] grantedFiel .build()); } + void addApplicationPrivileges(RoleDescriptor.ApplicationResourcePrivileges... privileges) { + this.applicationPrivileges.addAll(Arrays.asList(privileges)); + } + public void runAs(String... usernames) { this.runAs = usernames; } @@ -110,6 +141,14 @@ public RoleDescriptor.IndicesPrivileges[] indices() { return indicesPrivileges.toArray(new RoleDescriptor.IndicesPrivileges[indicesPrivileges.size()]); } + public List applicationPrivileges() { + return Collections.unmodifiableList(applicationPrivileges); + } + + public ConditionalClusterPrivilege[] conditionalClusterPrivileges() { + return conditionalClusterPrivileges; + } + public String[] runAs() { return runAs; } @@ -128,6 +167,10 @@ public void readFrom(StreamInput in) throws IOException { for (int i = 0; i < indicesSize; i++) { indicesPrivileges.add(RoleDescriptor.IndicesPrivileges.createFrom(in)); } + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + applicationPrivileges = in.readList(RoleDescriptor.ApplicationResourcePrivileges::createFrom); + conditionalClusterPrivileges = ConditionalClusterPrivileges.readArray(in); + } runAs = in.readStringArray(); refreshPolicy = RefreshPolicy.readFrom(in); metadata = in.readMap(); @@ -142,6 +185,10 @@ public void writeTo(StreamOutput out) throws IOException { for (RoleDescriptor.IndicesPrivileges index : indicesPrivileges) { index.writeTo(out); } + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeStreamableList(applicationPrivileges); + ConditionalClusterPrivileges.writeArray(out, this.conditionalClusterPrivileges); + } out.writeStringArray(runAs); refreshPolicy.writeTo(out); out.writeMap(metadata); @@ -151,7 +198,11 @@ public RoleDescriptor roleDescriptor() { return new RoleDescriptor(name, clusterPrivileges, indicesPrivileges.toArray(new RoleDescriptor.IndicesPrivileges[indicesPrivileges.size()]), + applicationPrivileges.toArray(new RoleDescriptor.ApplicationResourcePrivileges[applicationPrivileges.size()]), + conditionalClusterPrivileges, runAs, - metadata); + metadata, + Collections.emptyMap()); } -} \ No newline at end of file + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java index 25d443eda3fe9..670deb2216bf6 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java @@ -40,7 +40,9 @@ public PutRoleRequestBuilder source(String name, BytesReference source, XContent assert name.equals(descriptor.getName()); request.name(name); request.cluster(descriptor.getClusterPrivileges()); + request.conditionalCluster(descriptor.getConditionalClusterPrivileges()); request.addIndex(descriptor.getIndicesPrivileges()); + request.addApplicationPrivileges(descriptor.getApplicationPrivileges()); request.runAs(descriptor.getRunAs()); request.metadata(descriptor.getMetadata()); return this; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequest.java index 101ae00d635fc..dc43db0115e0a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequest.java @@ -5,11 +5,14 @@ */ package org.elasticsearch.xpack.core.security.action.user; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; import java.io.IOException; @@ -23,6 +26,7 @@ public class HasPrivilegesRequest extends ActionRequest implements UserRequest { private String username; private String[] clusterPrivileges; private RoleDescriptor.IndicesPrivileges[] indexPrivileges; + private ApplicationResourcePrivileges[] applicationPrivileges; @Override public ActionRequestValidationException validate() { @@ -33,9 +37,21 @@ public ActionRequestValidationException validate() { if (indexPrivileges == null) { validationException = addValidationError("indexPrivileges must not be null", validationException); } - if (clusterPrivileges != null && clusterPrivileges.length == 0 && indexPrivileges != null && indexPrivileges.length == 0) { - validationException = addValidationError("clusterPrivileges and indexPrivileges cannot both be empty", - validationException); + if (applicationPrivileges == null) { + validationException = addValidationError("applicationPrivileges must not be null", validationException); + } else { + for (ApplicationResourcePrivileges applicationPrivilege : applicationPrivileges) { + try { + ApplicationPrivilege.validateApplicationName(applicationPrivilege.getApplication()); + } catch (IllegalArgumentException e) { + validationException = addValidationError(e.getMessage(), validationException); + } + } + } + if (clusterPrivileges != null && clusterPrivileges.length == 0 + && indexPrivileges != null && indexPrivileges.length == 0 + && applicationPrivileges != null && applicationPrivileges.length == 0) { + validationException = addValidationError("must specify at least one privilege", validationException); } return validationException; } @@ -67,6 +83,10 @@ public String[] clusterPrivileges() { return 
clusterPrivileges; } + public ApplicationResourcePrivileges[] applicationPrivileges() { + return applicationPrivileges; + } + public void indexPrivileges(RoleDescriptor.IndicesPrivileges... privileges) { this.indexPrivileges = privileges; } @@ -75,6 +95,10 @@ public void clusterPrivileges(String... privileges) { this.clusterPrivileges = privileges; } + public void applicationPrivileges(ApplicationResourcePrivileges... appPrivileges) { + this.applicationPrivileges = appPrivileges; + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -85,6 +109,9 @@ public void readFrom(StreamInput in) throws IOException { for (int i = 0; i < indexSize; i++) { indexPrivileges[i] = RoleDescriptor.IndicesPrivileges.createFrom(in); } + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + applicationPrivileges = in.readArray(ApplicationResourcePrivileges::createFrom, ApplicationResourcePrivileges[]::new); + } } @Override @@ -96,6 +123,9 @@ public void writeTo(StreamOutput out) throws IOException { for (RoleDescriptor.IndicesPrivileges priv : indexPrivileges) { priv.writeTo(out); } + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeArray(ApplicationResourcePrivileges::write, applicationPrivileges); + } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequestBuilder.java index 4504a95962c13..bf705da1a1f45 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequestBuilder.java @@ -39,6 +39,7 @@ public HasPrivilegesRequestBuilder source(String username, BytesReference source request.username(username); request.indexPrivileges(role.getIndicesPrivileges()); request.clusterPrivileges(role.getClusterPrivileges()); + request.applicationPrivileges(role.getApplicationPrivileges()); return this; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java index dcc34d75ddbaf..b0711fc1bc12f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesResponse.java @@ -5,6 +5,11 @@ */ package org.elasticsearch.xpack.core.security.action.user; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -14,27 +19,27 @@ import java.util.Map; import java.util.Objects; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; - /** * Response for a {@link HasPrivilegesRequest} */ public class HasPrivilegesResponse extends ActionResponse { private boolean completeMatch; private Map cluster; - private List index; + private List index; + private Map> application; public HasPrivilegesResponse() { - this(true, Collections.emptyMap(), 
Collections.emptyList()); + this(true, Collections.emptyMap(), Collections.emptyList(), Collections.emptyMap()); } - public HasPrivilegesResponse(boolean completeMatch, Map cluster, Collection index) { + public HasPrivilegesResponse(boolean completeMatch, Map cluster, Collection index, + Map> application) { super(); this.completeMatch = completeMatch; this.cluster = new HashMap<>(cluster); this.index = new ArrayList<>(index); + this.application = new HashMap<>(); + application.forEach((key, val) -> this.application.put(key, Collections.unmodifiableList(new ArrayList<>(val)))); } public boolean isCompleteMatch() { @@ -45,44 +50,67 @@ public Map getClusterPrivileges() { return Collections.unmodifiableMap(cluster); } - public List getIndexPrivileges() { + public List getIndexPrivileges() { return Collections.unmodifiableList(index); } + /** + * Retrieves the results from checking application privileges, + * @return A {@code Map} keyed by application-name + */ + public Map> getApplicationPrivileges() { + return Collections.unmodifiableMap(application); + } + public void readFrom(StreamInput in) throws IOException { super.readFrom(in); completeMatch = in.readBoolean(); - int count = in.readVInt(); - index = new ArrayList<>(count); + index = readResourcePrivileges(in); + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + application = in.readMap(StreamInput::readString, HasPrivilegesResponse::readResourcePrivileges); + } + } + + private static List readResourcePrivileges(StreamInput in) throws IOException { + final int count = in.readVInt(); + final List list = new ArrayList<>(count); for (int i = 0; i < count; i++) { final String index = in.readString(); final Map privileges = in.readMap(StreamInput::readString, StreamInput::readBoolean); - this.index.add(new IndexPrivileges(index, privileges)); + list.add(new ResourcePrivileges(index, privileges)); } + return list; } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeBoolean(completeMatch); - out.writeVInt(index.size()); - for (IndexPrivileges index : index) { - out.writeString(index.index); - out.writeMap(index.privileges, StreamOutput::writeString, StreamOutput::writeBoolean); + writeResourcePrivileges(out, index); + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeMap(application, StreamOutput::writeString, HasPrivilegesResponse::writeResourcePrivileges); + } + } + + private static void writeResourcePrivileges(StreamOutput out, List privileges) throws IOException { + out.writeVInt(privileges.size()); + for (ResourcePrivileges priv : privileges) { + out.writeString(priv.resource); + out.writeMap(priv.privileges, StreamOutput::writeString, StreamOutput::writeBoolean); } } - public static class IndexPrivileges { - private final String index; + public static class ResourcePrivileges { + private final String resource; private final Map privileges; - public IndexPrivileges(String index, Map privileges) { - this.index = Objects.requireNonNull(index); + public ResourcePrivileges(String resource, Map privileges) { + this.resource = Objects.requireNonNull(resource); this.privileges = Collections.unmodifiableMap(privileges); } - public String getIndex() { - return index; + public String getResource() { + return resource; } public Map getPrivileges() { @@ -92,14 +120,14 @@ public Map getPrivileges() { @Override public String toString() { return getClass().getSimpleName() + "{" + - "index='" + index + '\'' + + "resource='" + resource + '\'' + ", privileges=" + privileges + '}'; } 
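Worth a concrete illustration before the remaining mechanical renames, since the application results add a second level of nesting compared with the index results. A hedged consumption sketch, with names illustrative:

```java
import java.util.List;
import java.util.Map;

// Walk the per-application results: application -> resources -> privilege/granted.
static void printAppResults(HasPrivilegesResponse response) {
    for (Map.Entry<String, List<HasPrivilegesResponse.ResourcePrivileges>> entry :
            response.getApplicationPrivileges().entrySet()) {
        for (HasPrivilegesResponse.ResourcePrivileges rp : entry.getValue()) {
            rp.getPrivileges().forEach((privilege, granted) ->
                System.out.println(entry.getKey() + ":" + rp.getResource()
                    + ":" + privilege + " -> " + granted));
        }
    }
}
```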
@Override public int hashCode() { - int result = index.hashCode(); + int result = resource.hashCode(); result = 31 * result + privileges.hashCode(); return result; } @@ -113,8 +141,8 @@ public boolean equals(Object o) { return false; } - final IndexPrivileges other = (IndexPrivileges) o; - return this.index.equals(other.index) && this.privileges.equals(other.privileges); + final ResourcePrivileges other = (ResourcePrivileges) o; + return this.resource.equals(other.resource) && this.privileges.equals(other.privileges); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandler.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandler.java index 8b31e77f9f8b7..d6f678a2dcb90 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandler.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandler.java @@ -10,60 +10,132 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.transport.TransportMessage; +import org.elasticsearch.xpack.core.XPackField; + +import java.util.Collections; +import java.util.List; +import java.util.Map; import static org.elasticsearch.xpack.core.security.support.Exceptions.authenticationError; /** - * The default implementation of a {@link AuthenticationFailureHandler}. This handler will return an exception with a - * RestStatus of 401 and the WWW-Authenticate header with a Basic challenge. + * The default implementation of a {@link AuthenticationFailureHandler}. This + * handler will return an exception with a RestStatus of 401 and default failure + * response headers like 'WWW-Authenticate' */ public class DefaultAuthenticationFailureHandler implements AuthenticationFailureHandler { + private final Map> defaultFailureResponseHeaders; + + /** + * Constructs default authentication failure handler + * + * @deprecated replaced by {@link #DefaultAuthenticationFailureHandler(Map)} + */ + @Deprecated + public DefaultAuthenticationFailureHandler() { + this(null); + } + + /** + * Constructs default authentication failure handler with provided default + * response headers. + * + * @param failureResponseHeaders Map of header key and list of header values to + * be sent as failure response. 
+ * @see Realm#getAuthenticationFailureHeaders() + */ + public DefaultAuthenticationFailureHandler(Map> failureResponseHeaders) { + if (failureResponseHeaders == null || failureResponseHeaders.isEmpty()) { + failureResponseHeaders = Collections.singletonMap("WWW-Authenticate", + Collections.singletonList("Basic realm=\"" + XPackField.SECURITY + "\" charset=\"UTF-8\"")); + } + this.defaultFailureResponseHeaders = Collections.unmodifiableMap(failureResponseHeaders); + } @Override - public ElasticsearchSecurityException failedAuthentication(RestRequest request, AuthenticationToken token, - ThreadContext context) { - return authenticationError("unable to authenticate user [{}] for REST request [{}]", token.principal(), request.uri()); + public ElasticsearchSecurityException failedAuthentication(RestRequest request, AuthenticationToken token, ThreadContext context) { + return createAuthenticationError("unable to authenticate user [{}] for REST request [{}]", null, token.principal(), request.uri()); } @Override public ElasticsearchSecurityException failedAuthentication(TransportMessage message, AuthenticationToken token, String action, - ThreadContext context) { - return authenticationError("unable to authenticate user [{}] for action [{}]", token.principal(), action); + ThreadContext context) { + return createAuthenticationError("unable to authenticate user [{}] for action [{}]", null, token.principal(), action); } @Override public ElasticsearchSecurityException exceptionProcessingRequest(RestRequest request, Exception e, ThreadContext context) { - if (e instanceof ElasticsearchSecurityException) { - assert ((ElasticsearchSecurityException) e).status() == RestStatus.UNAUTHORIZED; - assert ((ElasticsearchSecurityException) e).getHeader("WWW-Authenticate").size() == 1; - return (ElasticsearchSecurityException) e; - } - return authenticationError("error attempting to authenticate request", e); + return createAuthenticationError("error attempting to authenticate request", e, (Object[]) null); } @Override public ElasticsearchSecurityException exceptionProcessingRequest(TransportMessage message, String action, Exception e, - ThreadContext context) { - if (e instanceof ElasticsearchSecurityException) { - assert ((ElasticsearchSecurityException) e).status() == RestStatus.UNAUTHORIZED; - assert ((ElasticsearchSecurityException) e).getHeader("WWW-Authenticate").size() == 1; - return (ElasticsearchSecurityException) e; - } - return authenticationError("error attempting to authenticate request", e); + ThreadContext context) { + return createAuthenticationError("error attempting to authenticate request", e, (Object[]) null); } @Override public ElasticsearchSecurityException missingToken(RestRequest request, ThreadContext context) { - return authenticationError("missing authentication token for REST request [{}]", request.uri()); + return createAuthenticationError("missing authentication token for REST request [{}]", null, request.uri()); } @Override public ElasticsearchSecurityException missingToken(TransportMessage message, String action, ThreadContext context) { - return authenticationError("missing authentication token for action [{}]", action); + return createAuthenticationError("missing authentication token for action [{}]", null, action); } @Override public ElasticsearchSecurityException authenticationRequired(String action, ThreadContext context) { - return authenticationError("action [{}] requires authentication", action); + return createAuthenticationError("action [{}] requires authentication", 
null, action); + } + + /** + * Creates an instance of {@link ElasticsearchSecurityException} with + * {@link RestStatus#UNAUTHORIZED} status. + *
    + * Also adds default failure response headers as configured for this + * {@link DefaultAuthenticationFailureHandler} + *
    + * It may replace existing response headers if the cause is an instance of + * {@link ElasticsearchSecurityException} + * + * @param message error message + * @param t cause, if it is an instance of + * {@link ElasticsearchSecurityException} asserts status is + * RestStatus.UNAUTHORIZED and adds headers to it, else it will + * create a new instance of {@link ElasticsearchSecurityException} + * @param args error message args + * @return instance of {@link ElasticsearchSecurityException} + */ + private ElasticsearchSecurityException createAuthenticationError(final String message, final Throwable t, final Object... args) { + final ElasticsearchSecurityException ese; + final boolean containsNegotiateWithToken; + if (t instanceof ElasticsearchSecurityException) { + assert ((ElasticsearchSecurityException) t).status() == RestStatus.UNAUTHORIZED; + ese = (ElasticsearchSecurityException) t; + if (ese.getHeader("WWW-Authenticate") != null && ese.getHeader("WWW-Authenticate").isEmpty() == false) { + /** + * If 'WWW-Authenticate' header is present with 'Negotiate ' then do not + * replace. In case of kerberos spnego mechanism, we use + * 'WWW-Authenticate' header value to communicate outToken to peer. + */ + containsNegotiateWithToken = + ese.getHeader("WWW-Authenticate").stream() + .anyMatch(s -> s != null && s.regionMatches(true, 0, "Negotiate ", 0, "Negotiate ".length())); + } else { + containsNegotiateWithToken = false; + } + } else { + ese = authenticationError(message, t, args); + containsNegotiateWithToken = false; + } + defaultFailureResponseHeaders.entrySet().stream().forEach((e) -> { + if (containsNegotiateWithToken && e.getKey().equalsIgnoreCase("WWW-Authenticate")) { + return; + } + // If it is already present then it will replace the existing header. + ese.addHeader(e.getKey(), e.getValue()); + }); + return ese; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java index 3e92be2ef904d..2c63ca95eb980 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java @@ -8,9 +8,12 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.security.user.User; +import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; /** @@ -56,6 +59,18 @@ public int order() { return config.order; } + /** + * Each realm can define response headers to be sent on failure. + *
    + * By default it adds 'WWW-Authenticate' header with auth scheme 'Basic'. + * + * @return Map of authentication failure response headers. + */ + public Map> getAuthenticationFailureHeaders() { + return Collections.singletonMap("WWW-Authenticate", + Collections.singletonList("Basic realm=\"" + XPackField.SECURITY + "\" charset=\"UTF-8\"")); + } + @Override public int compareTo(Realm other) { int result = Integer.compare(config.order, other.config.order); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/kerberos/KerberosRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/kerberos/KerberosRealmSettings.java new file mode 100644 index 0000000000000..7524ef08c1e72 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/kerberos/KerberosRealmSettings.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.security.authc.kerberos; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.set.Sets; + +import java.util.Set; + +/** + * Kerberos Realm settings + */ +public final class KerberosRealmSettings { + public static final String TYPE = "kerberos"; + + /** + * Kerberos key tab for Elasticsearch service
    + * Uses single key tab for multiple service accounts. + */ + public static final Setting HTTP_SERVICE_KEYTAB_PATH = + Setting.simpleString("keytab.path", Property.NodeScope); + public static final Setting SETTING_KRB_DEBUG_ENABLE = + Setting.boolSetting("krb.debug", Boolean.FALSE, Property.NodeScope); + public static final Setting SETTING_REMOVE_REALM_NAME = + Setting.boolSetting("remove_realm_name", Boolean.FALSE, Property.NodeScope); + + // Cache + private static final TimeValue DEFAULT_TTL = TimeValue.timeValueMinutes(20); + private static final int DEFAULT_MAX_USERS = 100_000; // 100k users + public static final Setting CACHE_TTL_SETTING = Setting.timeSetting("cache.ttl", DEFAULT_TTL, Setting.Property.NodeScope); + public static final Setting CACHE_MAX_USERS_SETTING = + Setting.intSetting("cache.max_users", DEFAULT_MAX_USERS, Property.NodeScope); + + private KerberosRealmSettings() { + } + + /** + * @return the valid set of {@link Setting}s for a {@value #TYPE} realm + */ + public static Set> getSettings() { + return Sets.newHashSet(HTTP_SERVICE_KEYTAB_PATH, CACHE_TTL_SETTING, CACHE_MAX_USERS_SETTING, SETTING_KRB_DEBUG_ENABLE, + SETTING_REMOVE_REALM_NAME); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index 65bde9bd1dfe5..42bd771103fdd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -18,11 +18,14 @@ import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges; import org.elasticsearch.xpack.core.security.support.MetadataUtils; import org.elasticsearch.xpack.core.security.support.Validation; import org.elasticsearch.xpack.core.security.xcontent.XContentUtils; @@ -31,9 +34,11 @@ import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Objects; /** * A holder for a Role that contains user-readable information about the Role @@ -45,7 +50,9 @@ public class RoleDescriptor implements ToXContentObject { private final String name; private final String[] clusterPrivileges; + private final ConditionalClusterPrivilege[] conditionalClusterPrivileges; private final IndicesPrivileges[] indicesPrivileges; + private final ApplicationResourcePrivileges[] applicationPrivileges; private final String[] runAs; private final Map metadata; private final Map transientMetadata; @@ -57,6 +64,11 @@ public RoleDescriptor(String name, this(name, clusterPrivileges, indicesPrivileges, runAs, null); } + /** + * @deprecated Use {@link #RoleDescriptor(String, String[], IndicesPrivileges[], 
ApplicationResourcePrivileges[], + * ConditionalClusterPrivilege[], String[], Map, Map)} + */ + @Deprecated public RoleDescriptor(String name, @Nullable String[] clusterPrivileges, @Nullable IndicesPrivileges[] indicesPrivileges, @@ -65,16 +77,34 @@ public RoleDescriptor(String name, this(name, clusterPrivileges, indicesPrivileges, runAs, metadata, null); } + /** + * @deprecated Use {@link #RoleDescriptor(String, String[], IndicesPrivileges[], ApplicationResourcePrivileges[], + * ConditionalClusterPrivilege[], String[], Map, Map)} + */ + @Deprecated + public RoleDescriptor(String name, + @Nullable String[] clusterPrivileges, + @Nullable IndicesPrivileges[] indicesPrivileges, + @Nullable String[] runAs, + @Nullable Map metadata, + @Nullable Map transientMetadata) { + this(name, clusterPrivileges, indicesPrivileges, null, null, runAs, metadata, transientMetadata); + } public RoleDescriptor(String name, @Nullable String[] clusterPrivileges, @Nullable IndicesPrivileges[] indicesPrivileges, + @Nullable ApplicationResourcePrivileges[] applicationPrivileges, + @Nullable ConditionalClusterPrivilege[] conditionalClusterPrivileges, @Nullable String[] runAs, @Nullable Map metadata, @Nullable Map transientMetadata) { this.name = name; this.clusterPrivileges = clusterPrivileges != null ? clusterPrivileges : Strings.EMPTY_ARRAY; + this.conditionalClusterPrivileges = conditionalClusterPrivileges != null + ? conditionalClusterPrivileges : ConditionalClusterPrivileges.EMPTY_ARRAY; this.indicesPrivileges = indicesPrivileges != null ? indicesPrivileges : IndicesPrivileges.NONE; + this.applicationPrivileges = applicationPrivileges != null ? applicationPrivileges : ApplicationResourcePrivileges.NONE; this.runAs = runAs != null ? runAs : Strings.EMPTY_ARRAY; this.metadata = metadata != null ? Collections.unmodifiableMap(metadata) : Collections.emptyMap(); this.transientMetadata = transientMetadata != null ? 
Collections.unmodifiableMap(transientMetadata) : @@ -89,10 +119,18 @@ public String[] getClusterPrivileges() { return this.clusterPrivileges; } + public ConditionalClusterPrivilege[] getConditionalClusterPrivileges() { + return this.conditionalClusterPrivileges; + } + public IndicesPrivileges[] getIndicesPrivileges() { return this.indicesPrivileges; } + public ApplicationResourcePrivileges[] getApplicationPrivileges() { + return this.applicationPrivileges; + } + public String[] getRunAs() { return this.runAs; } @@ -114,10 +152,15 @@ public String toString() { StringBuilder sb = new StringBuilder("Role["); sb.append("name=").append(name); sb.append(", cluster=[").append(Strings.arrayToCommaDelimitedString(clusterPrivileges)); + sb.append("], global=[").append(Strings.arrayToCommaDelimitedString(conditionalClusterPrivileges)); sb.append("], indicesPrivileges=["); for (IndicesPrivileges group : indicesPrivileges) { sb.append(group.toString()).append(","); } + sb.append("], applicationPrivileges=["); + for (ApplicationResourcePrivileges privilege : applicationPrivileges) { + sb.append(privilege.toString()).append(","); + } sb.append("], runAs=[").append(Strings.arrayToCommaDelimitedString(runAs)); sb.append("], metadata=["); MetadataUtils.writeValue(sb, metadata); @@ -134,7 +177,9 @@ public boolean equals(Object o) { if (!name.equals(that.name)) return false; if (!Arrays.equals(clusterPrivileges, that.clusterPrivileges)) return false; + if (!Arrays.equals(conditionalClusterPrivileges, that.conditionalClusterPrivileges)) return false; if (!Arrays.equals(indicesPrivileges, that.indicesPrivileges)) return false; + if (!Arrays.equals(applicationPrivileges, that.applicationPrivileges)) return false; if (!metadata.equals(that.getMetadata())) return false; return Arrays.equals(runAs, that.runAs); } @@ -143,7 +188,9 @@ public boolean equals(Object o) { public int hashCode() { int result = name.hashCode(); result = 31 * result + Arrays.hashCode(clusterPrivileges); + result = 31 * result + Arrays.hashCode(conditionalClusterPrivileges); result = 31 * result + Arrays.hashCode(indicesPrivileges); + result = 31 * result + Arrays.hashCode(applicationPrivileges); result = 31 * result + Arrays.hashCode(runAs); result = 31 * result + metadata.hashCode(); return result; @@ -157,8 +204,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws /** * Generates x-content for this {@link RoleDescriptor} instance. 
* - * @param builder the x-content builder - * @param params the parameters for x-content generation directives + * @param builder the x-content builder + * @param params the parameters for x-content generation directives * @param docCreation {@code true} if the x-content is being generated for creating a document * in the security index, {@code false} if the x-content being generated * is for API display purposes @@ -168,7 +215,12 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public XContentBuilder toXContent(XContentBuilder builder, Params params, boolean docCreation) throws IOException { builder.startObject(); builder.array(Fields.CLUSTER.getPreferredName(), clusterPrivileges); + if (conditionalClusterPrivileges.length != 0) { + builder.field(Fields.GLOBAL.getPreferredName()); + ConditionalClusterPrivileges.toXContent(builder, params, Arrays.asList(conditionalClusterPrivileges)); + } builder.array(Fields.INDICES.getPreferredName(), (Object[]) indicesPrivileges); + builder.array(Fields.APPLICATIONS.getPreferredName(), (Object[]) applicationPrivileges); if (runAs != null) { builder.array(Fields.RUN_AS.getPreferredName(), runAs); } @@ -198,7 +250,19 @@ public static RoleDescriptor readFrom(StreamInput in) throws IOException { } else { transientMetadata = Collections.emptyMap(); } - return new RoleDescriptor(name, clusterPrivileges, indicesPrivileges, runAs, metadata, transientMetadata); + + final ApplicationResourcePrivileges[] applicationPrivileges; + final ConditionalClusterPrivilege[] conditionalClusterPrivileges; + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + applicationPrivileges = in.readArray(ApplicationResourcePrivileges::createFrom, ApplicationResourcePrivileges[]::new); + conditionalClusterPrivileges = ConditionalClusterPrivileges.readArray(in); + } else { + applicationPrivileges = ApplicationResourcePrivileges.NONE; + conditionalClusterPrivileges = ConditionalClusterPrivileges.EMPTY_ARRAY; + } + + return new RoleDescriptor(name, clusterPrivileges, indicesPrivileges, applicationPrivileges, conditionalClusterPrivileges, + runAs, metadata, transientMetadata); } public static void writeTo(RoleDescriptor descriptor, StreamOutput out) throws IOException { @@ -213,6 +277,10 @@ public static void writeTo(RoleDescriptor descriptor, StreamOutput out) throws I if (out.getVersion().onOrAfter(Version.V_5_2_0)) { out.writeMap(descriptor.transientMetadata); } + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeArray(ApplicationResourcePrivileges::write, descriptor.applicationPrivileges); + ConditionalClusterPrivileges.writeArray(out, descriptor.getConditionalClusterPrivileges()); + } } public static RoleDescriptor parse(String name, BytesReference source, boolean allow2xFormat, XContentType xContentType) @@ -221,7 +289,7 @@ public static RoleDescriptor parse(String name, BytesReference source, boolean a // EMPTY is safe here because we never use namedObject try (InputStream stream = source.streamInput(); XContentParser parser = xContentType.xContent() - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { return parse(name, parser, allow2xFormat); } } @@ -243,6 +311,8 @@ public static RoleDescriptor parse(String name, XContentParser parser, boolean a String currentFieldName = null; IndicesPrivileges[] indicesPrivileges = null; String[] clusterPrivileges = null; + List conditionalClusterPrivileges 
= Collections.emptyList(); + ApplicationResourcePrivileges[] applicationPrivileges = null; String[] runAsUsers = null; Map metadata = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -255,6 +325,11 @@ public static RoleDescriptor parse(String name, XContentParser parser, boolean a runAsUsers = readStringArray(name, parser, true); } else if (Fields.CLUSTER.match(currentFieldName, parser.getDeprecationHandler())) { clusterPrivileges = readStringArray(name, parser, true); + } else if (Fields.APPLICATIONS.match(currentFieldName, parser.getDeprecationHandler()) + || Fields.APPLICATION.match(currentFieldName, parser.getDeprecationHandler())) { + applicationPrivileges = parseApplicationPrivileges(name, parser); + } else if (Fields.GLOBAL.match(currentFieldName, parser.getDeprecationHandler())) { + conditionalClusterPrivileges = ConditionalClusterPrivileges.parse(parser); } else if (Fields.METADATA.match(currentFieldName, parser.getDeprecationHandler())) { if (token != XContentParser.Token.START_OBJECT) { throw new ElasticsearchParseException( @@ -266,8 +341,7 @@ public static RoleDescriptor parse(String name, XContentParser parser, boolean a // consume object but just drop parser.map(); } else { - throw new ElasticsearchParseException("expected field [{}] to be an object, but found [{}] instead", - currentFieldName, token); + throw new ElasticsearchParseException("failed to parse role [{}]. unexpected field [{}]", name, currentFieldName); } } else if (Fields.TYPE.match(currentFieldName, parser.getDeprecationHandler())) { // don't need it @@ -275,7 +349,9 @@ public static RoleDescriptor parse(String name, XContentParser parser, boolean a throw new ElasticsearchParseException("failed to parse role [{}]. unexpected field [{}]", name, currentFieldName); } } - return new RoleDescriptor(name, clusterPrivileges, indicesPrivileges, runAsUsers, metadata); + return new RoleDescriptor(name, clusterPrivileges, indicesPrivileges, applicationPrivileges, + conditionalClusterPrivileges.toArray(new ConditionalClusterPrivilege[conditionalClusterPrivileges.size()]), runAsUsers, + metadata, null); } private static String[] readStringArray(String roleName, XContentParser parser, boolean allowNull) throws IOException { @@ -291,7 +367,7 @@ public static RoleDescriptor parsePrivilegesCheck(String description, BytesRefer throws IOException { try (InputStream stream = source.streamInput(); XContentParser parser = xContentType.xContent() - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { // advance to the START_OBJECT token XContentParser.Token token = parser.nextToken(); if (token != XContentParser.Token.START_OBJECT) { @@ -301,6 +377,7 @@ public static RoleDescriptor parsePrivilegesCheck(String description, BytesRefer String currentFieldName = null; IndicesPrivileges[] indexPrivileges = null; String[] clusterPrivileges = null; + ApplicationResourcePrivileges[] applicationPrivileges = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -308,14 +385,17 @@ public static RoleDescriptor parsePrivilegesCheck(String description, BytesRefer indexPrivileges = parseIndices(description, parser, false); } else if (Fields.CLUSTER.match(currentFieldName, parser.getDeprecationHandler())) { clusterPrivileges = readStringArray(description, parser, true); + 
} else if (Fields.APPLICATIONS.match(currentFieldName, parser.getDeprecationHandler()) + || Fields.APPLICATION.match(currentFieldName, parser.getDeprecationHandler())) { + applicationPrivileges = parseApplicationPrivileges(description, parser); } else { throw new ElasticsearchParseException("failed to parse privileges check [{}]. unexpected field [{}]", description, currentFieldName); } } - if (indexPrivileges == null && clusterPrivileges == null) { - throw new ElasticsearchParseException("failed to parse privileges check [{}]. fields [{}] and [{}] are both missing", - description, Fields.INDEX, Fields.CLUSTER); + if (indexPrivileges == null && clusterPrivileges == null && applicationPrivileges == null) { + throw new ElasticsearchParseException("failed to parse privileges check [{}]. All privilege fields [{},{},{}] are missing", + description, Fields.CLUSTER, Fields.INDEX, Fields.APPLICATIONS); } if (indexPrivileges != null) { if (Arrays.stream(indexPrivileges).anyMatch(IndicesPrivileges::isUsingFieldLevelSecurity)) { @@ -326,7 +406,7 @@ public static RoleDescriptor parsePrivilegesCheck(String description, BytesRefer throw new ElasticsearchParseException("Field [{}] is not supported in a has_privileges request", Fields.QUERY); } } - return new RoleDescriptor(description, clusterPrivileges, indexPrivileges, null); + return new RoleDescriptor(description, clusterPrivileges, indexPrivileges, applicationPrivileges, null, null, null, null); } } @@ -361,7 +441,7 @@ private static RoleDescriptor.IndicesPrivileges parseIndex(String roleName, XCon currentFieldName = parser.currentName(); } else if (Fields.NAMES.match(currentFieldName, parser.getDeprecationHandler())) { if (token == XContentParser.Token.VALUE_STRING) { - names = new String[] { parser.text() }; + names = new String[]{parser.text()}; } else if (token == XContentParser.Token.START_ARRAY) { names = readStringArray(roleName, parser, false); if (names.length == 0) { @@ -474,6 +554,37 @@ private static RoleDescriptor.IndicesPrivileges parseIndex(String roleName, XCon .build(); } + private static ApplicationResourcePrivileges[] parseApplicationPrivileges(String roleName, XContentParser parser) + throws IOException { + if (parser.currentToken() != XContentParser.Token.START_ARRAY) { + throw new ElasticsearchParseException("failed to parse application privileges for role [{}]. expected field [{}] value " + + "to be an array, but found [{}] instead", roleName, parser.currentName(), parser.currentToken()); + } + List privileges = new ArrayList<>(); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + privileges.add(parseApplicationPrivilege(roleName, parser)); + } + return privileges.toArray(new ApplicationResourcePrivileges[privileges.size()]); + } + + private static ApplicationResourcePrivileges parseApplicationPrivilege(String roleName, XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException("failed to parse application privileges for role [{}]. expected field [{}] value to " + + "be an array of objects, but found an array element of type [{}]", roleName, parser.currentName(), token); + } + final ApplicationResourcePrivileges.Builder builder = ApplicationResourcePrivileges.PARSER.parse(parser, null); + if (builder.hasResources() == false) { + throw new ElasticsearchParseException("failed to parse application privileges for role [{}]. 
missing required [{}] field", + roleName, Fields.RESOURCES.getPreferredName()); + } + if (builder.hasPrivileges() == false) { + throw new ElasticsearchParseException("failed to parse application privileges for role [{}]. missing required [{}] field", + roleName, Fields.PRIVILEGES.getPreferredName()); + } + return builder.build(); + } + /** * A class representing permissions for a group of indices mapped to * privileges, field permissions, and a query. @@ -695,14 +806,176 @@ public IndicesPrivileges build() { } } + public static class ApplicationResourcePrivileges implements ToXContentObject, Streamable { + + private static final ApplicationResourcePrivileges[] NONE = new ApplicationResourcePrivileges[0]; + private static final ObjectParser PARSER = new ObjectParser<>("application", + ApplicationResourcePrivileges::builder); + + static { + PARSER.declareString(Builder::application, Fields.APPLICATION); + PARSER.declareStringArray(Builder::privileges, Fields.PRIVILEGES); + PARSER.declareStringArray(Builder::resources, Fields.RESOURCES); + } + + private String application; + private String[] privileges; + private String[] resources; + + private ApplicationResourcePrivileges() { + } + + public static Builder builder() { + return new Builder(); + } + + public String getApplication() { + return application; + } + + public String[] getResources() { + return this.resources; + } + + public String[] getPrivileges() { + return this.privileges; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(getClass().getSimpleName()) + .append("[application=") + .append(application) + .append(", privileges=[") + .append(Strings.arrayToCommaDelimitedString(privileges)) + .append("], resources=[") + .append(Strings.arrayToCommaDelimitedString(resources)) + .append("]]"); + return sb.toString(); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || this.getClass() != o.getClass()) { + return false; + } + + ApplicationResourcePrivileges that = (ApplicationResourcePrivileges) o; + + return Objects.equals(this.application, that.application) + && Arrays.equals(this.resources, that.resources) + && Arrays.equals(this.privileges, that.privileges); + } + + @Override + public int hashCode() { + int result = Arrays.hashCode(resources); + result = 31 * result + Arrays.hashCode(privileges); + return result; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Fields.APPLICATION.getPreferredName(), application); + builder.array(Fields.PRIVILEGES.getPreferredName(), privileges); + builder.array(Fields.RESOURCES.getPreferredName(), resources); + return builder.endObject(); + } + + public static ApplicationResourcePrivileges createFrom(StreamInput in) throws IOException { + ApplicationResourcePrivileges ip = new ApplicationResourcePrivileges(); + ip.readFrom(in); + return ip; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + this.application = in.readString(); + this.privileges = in.readStringArray(); + this.resources = in.readStringArray(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(application); + out.writeStringArray(privileges); + out.writeStringArray(resources); + } + + public static void write(StreamOutput out, ApplicationResourcePrivileges privileges) throws IOException { + privileges.writeTo(out); + } + + public static class Builder { + + private 
ApplicationResourcePrivileges applicationPrivileges = new ApplicationResourcePrivileges(); + + private Builder() { + } + + public Builder application(String appName) { + applicationPrivileges.application = appName; + return this; + } + + public Builder resources(String... resources) { + applicationPrivileges.resources = resources; + return this; + } + + public Builder resources(List resources) { + return resources(resources.toArray(new String[resources.size()])); + } + + public Builder privileges(String... privileges) { + applicationPrivileges.privileges = privileges; + return this; + } + + public Builder privileges(Collection privileges) { + return privileges(privileges.toArray(new String[privileges.size()])); + } + + public boolean hasResources() { + return applicationPrivileges.resources != null; + } + + public boolean hasPrivileges() { + return applicationPrivileges.privileges != null; + } + + public ApplicationResourcePrivileges build() { + if (Strings.isNullOrEmpty(applicationPrivileges.application)) { + throw new IllegalArgumentException("application privileges must have an application name"); + } + if (applicationPrivileges.privileges == null || applicationPrivileges.privileges.length == 0) { + throw new IllegalArgumentException("application privileges must define at least one privilege"); + } + if (applicationPrivileges.resources == null || applicationPrivileges.resources.length == 0) { + throw new IllegalArgumentException("application privileges must refer to at least one resource"); + } + return applicationPrivileges; + } + + } + } + public interface Fields { ParseField CLUSTER = new ParseField("cluster"); + ParseField GLOBAL = new ParseField("global"); ParseField INDEX = new ParseField("index"); ParseField INDICES = new ParseField("indices"); + ParseField APPLICATIONS = new ParseField("applications"); ParseField RUN_AS = new ParseField("run_as"); ParseField NAMES = new ParseField("names"); + ParseField RESOURCES = new ParseField("resources"); ParseField QUERY = new ParseField("query"); ParseField PRIVILEGES = new ParseField("privileges"); + ParseField APPLICATION = new ParseField("application"); ParseField FIELD_PERMISSIONS = new ParseField("field_security"); ParseField FIELD_PERMISSIONS_2X = new ParseField("fields"); ParseField GRANT_FIELDS = new ParseField("grant"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java new file mode 100644 index 0000000000000..8f1e78a4663e4 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
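Before moving into `ApplicationPermission`, a quick orientation on the `ApplicationResourcePrivileges` builder added above. This is a minimal sketch and not part of the change itself: it assumes the x-pack core classes are on the classpath, and the application, privilege, and resource values ("myapp", "read"/"write", "data/*") are invented for illustration.

```java
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges;

public class ApplicationResourcePrivilegesExample {
    public static void main(String[] args) {
        // One entry: the "read" and "write" privileges of the application "myapp",
        // applied to every resource matching the wildcard pattern "data/*".
        ApplicationResourcePrivileges entry = ApplicationResourcePrivileges.builder()
                .application("myapp")
                .privileges("read", "write")
                .resources("data/*")
                .build();

        // build() enforces the invariants shown in the hunk above: a non-empty
        // application name, at least one privilege, and at least one resource.
        System.out.println(entry);
    }
}
```

Each such entry corresponds to one element of the `applications` array that `parseApplicationPrivileges` reads in the parsing code above.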
+ */ +package org.elasticsearch.xpack.core.security.authz.permission; + +import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; +import org.elasticsearch.xpack.core.security.support.Automatons; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Predicate; + +/** + * A permission that is based on privileges for application (non-Elasticsearch) capabilities + */ +public final class ApplicationPermission { + + public static final ApplicationPermission NONE = new ApplicationPermission(Collections.emptyList()); + + private final Logger logger; + private final List<PermissionEntry> permissions; + + /** + * @param privilegesAndResources A list of (privilege, resources). Each element in the {@link List} is a {@link Tuple} containing + * a single {@link ApplicationPrivilege} and the {@link Set} of resources to which that privilege is + * applied. The resources are treated as a wildcard {@link Automatons#pattern}. + */ + ApplicationPermission(List<Tuple<ApplicationPrivilege, Set<String>>> privilegesAndResources) { + this.logger = Loggers.getLogger(getClass()); + Map<ApplicationPrivilege, PermissionEntry> permissionsByPrivilege = new HashMap<>(); + privilegesAndResources.forEach(tup -> permissionsByPrivilege.compute(tup.v1(), (k, existing) -> { + final Automaton patterns = Automatons.patterns(tup.v2()); + if (existing == null) { + return new PermissionEntry(k, patterns); + } else { + return new PermissionEntry(k, Automatons.unionAndMinimize(Arrays.asList(existing.resources, patterns))); + } + })); + this.permissions = Collections.unmodifiableList(new ArrayList<>(permissionsByPrivilege.values())); + } + + /** + * Determines whether this permission grants the specified privilege on the given resource. + *
+ * <p>
+ * An {@link ApplicationPermission} consists of a sequence of permission entries, where each entry contains a single
+ * {@link ApplicationPrivilege} and one or more resource patterns.
+ * </p>
+ * <p>
+ * This method returns {@code true} if one or more of those entries meet the following criteria:
+ * </p>
+ * <ul>
+ * <li>The entry's application, when interpreted as an {@link Automaton} {@link Automatons#pattern(String) pattern}, matches the
+ * application given in the argument (interpreted as a raw string)</li>
+ * <li>The {@link ApplicationPrivilege#getAutomaton automaton that defines the entry's actions} entirely covers the
+ * automaton given in the argument (that is, the argument is a subset of the entry's automaton)</li>
+ * <li>The entry's resources, when interpreted as an {@link Automaton} {@link Automatons#patterns(String...) set of patterns}, entirely
+ * covers the resource given in the argument (also interpreted as an {@link Automaton} {@link Automatons#pattern(String) pattern})</li>
+ * </ul>
    + */ + public boolean grants(ApplicationPrivilege other, String resource) { + Automaton resourceAutomaton = Automatons.patterns(resource); + final boolean matched = permissions.stream().anyMatch(e -> e.grants(other, resourceAutomaton)); + logger.trace("Permission [{}] {} grant [{} , {}]", this, matched ? "does" : "does not", other, resource); + return matched; + } + + @Override + public String toString() { + return getClass().getSimpleName() + "{privileges=" + permissions + "}"; + } + + private static class PermissionEntry { + private final ApplicationPrivilege privilege; + private final Predicate application; + private final Automaton resources; + + private PermissionEntry(ApplicationPrivilege privilege, Automaton resources) { + this.privilege = privilege; + this.application = Automatons.predicate(privilege.getApplication()); + this.resources = resources; + } + + private boolean grants(ApplicationPrivilege other, Automaton resource) { + return this.application.test(other.getApplication()) + && Operations.isEmpty(privilege.getAutomaton()) == false + && Operations.subsetOf(other.getAutomaton(), privilege.getAutomaton()) + && Operations.subsetOf(resource, this.resources); + } + + @Override + public String toString() { + return privilege.toString(); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java index 7c990bd735a41..370fd70b169e8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java @@ -5,30 +5,97 @@ */ package org.elasticsearch.xpack.core.security.authz.permission; +import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; +import java.util.Collection; +import java.util.Set; import java.util.function.Predicate; +import java.util.stream.Collectors; /** - * A permission that is based on privileges for cluster wide actions + * A permission that is based on privileges for cluster wide actions, with the optional ability to inspect the request object */ -public final class ClusterPermission { - - public static final ClusterPermission NONE = new ClusterPermission(ClusterPrivilege.NONE); - +public abstract class ClusterPermission { private final ClusterPrivilege privilege; - private final Predicate predicate; ClusterPermission(ClusterPrivilege privilege) { this.privilege = privilege; - this.predicate = privilege.predicate(); } public ClusterPrivilege privilege() { return privilege; } - public boolean check(String action) { - return predicate.test(action); + public abstract boolean check(String action, TransportRequest request); + + /** + * A permission that is based solely on cluster privileges and does not consider request state + */ + public static class SimpleClusterPermission extends ClusterPermission { + + public static final SimpleClusterPermission NONE = new SimpleClusterPermission(ClusterPrivilege.NONE); + + private final Predicate predicate; + + SimpleClusterPermission(ClusterPrivilege privilege) { + super(privilege); + this.predicate = privilege.predicate(); + } + + @Override + public boolean check(String action, TransportRequest request) { + 
return predicate.test(action); + } + } + + /** + * A permission that makes use of both cluster privileges and request inspection + */ + public static class ConditionalClusterPermission extends ClusterPermission { + private final Predicate actionPredicate; + private final Predicate requestPredicate; + + public ConditionalClusterPermission(ConditionalClusterPrivilege conditionalPrivilege) { + this(conditionalPrivilege.getPrivilege(), conditionalPrivilege.getRequestPredicate()); + } + + public ConditionalClusterPermission(ClusterPrivilege privilege, Predicate requestPredicate) { + super(privilege); + this.actionPredicate = privilege.predicate(); + this.requestPredicate = requestPredicate; + } + + @Override + public boolean check(String action, TransportRequest request) { + return actionPredicate.test(action) && requestPredicate.test(request); + } + } + + /** + * A permission that composes a number of other cluster permissions + */ + public static class CompositeClusterPermission extends ClusterPermission { + private final Collection children; + + public CompositeClusterPermission(Collection children) { + super(buildPrivilege(children)); + this.children = children; + } + + private static ClusterPrivilege buildPrivilege(Collection children) { + final Set names = children.stream() + .map(ClusterPermission::privilege) + .map(ClusterPrivilege::name) + .flatMap(Set::stream) + .collect(Collectors.toSet()); + return ClusterPrivilege.get(names); + } + + @Override + public boolean check(String action, TransportRequest request) { + return children.stream().anyMatch(p -> p.check(action, request)); + } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java index a850a4a16f65b..fd7eb4864198e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java @@ -8,14 +8,18 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; @@ -29,12 +33,14 @@ public final class Role { private final String[] names; private final ClusterPermission cluster; private final IndicesPermission indices; + private final ApplicationPermission application; private final RunAsPermission runAs; - Role(String[] names, ClusterPermission cluster, IndicesPermission indices, RunAsPermission runAs) { + Role(String[] names, ClusterPermission cluster, IndicesPermission indices, ApplicationPermission application, RunAsPermission runAs) { this.names = names; this.cluster = 
Objects.requireNonNull(cluster); this.indices = Objects.requireNonNull(indices); + this.application = Objects.requireNonNull(application); this.runAs = Objects.requireNonNull(runAs); } @@ -50,6 +56,10 @@ public IndicesPermission indices() { return indices; } + public ApplicationPermission application() { + return application; + } + public RunAsPermission runAs() { return runAs; } @@ -70,7 +80,7 @@ public static Builder builder(RoleDescriptor rd, FieldPermissionsCache fieldPerm public IndicesAccessControl authorize(String action, Set requestedIndicesOrAliases, MetaData metaData, FieldPermissionsCache fieldPermissionsCache) { Map indexPermissions = indices.authorize( - action, requestedIndicesOrAliases, metaData, fieldPermissionsCache + action, requestedIndicesOrAliases, metaData, fieldPermissionsCache ); // At least one role / indices permission set need to match with all the requested indices/aliases: @@ -87,9 +97,10 @@ public IndicesAccessControl authorize(String action, Set requestedIndice public static class Builder { private final String[] names; - private ClusterPermission cluster = ClusterPermission.NONE; + private ClusterPermission cluster = ClusterPermission.SimpleClusterPermission.NONE; private RunAsPermission runAs = RunAsPermission.NONE; private List groups = new ArrayList<>(); + private List>> applicationPrivs = new ArrayList<>(); private Builder(String[] names) { this.names = names; @@ -97,20 +108,44 @@ private Builder(String[] names) { private Builder(RoleDescriptor rd, @Nullable FieldPermissionsCache fieldPermissionsCache) { this.names = new String[] { rd.getName() }; - if (rd.getClusterPrivileges().length == 0) { - cluster = ClusterPermission.NONE; - } else { - this.cluster(ClusterPrivilege.get(Sets.newHashSet(rd.getClusterPrivileges()))); - } + cluster(Sets.newHashSet(rd.getClusterPrivileges()), Arrays.asList(rd.getConditionalClusterPrivileges())); groups.addAll(convertFromIndicesPrivileges(rd.getIndicesPrivileges(), fieldPermissionsCache)); + + final RoleDescriptor.ApplicationResourcePrivileges[] applicationPrivileges = rd.getApplicationPrivileges(); + for (int i = 0; i < applicationPrivileges.length; i++) { + applicationPrivs.add(convertApplicationPrivilege(rd.getName(), i, applicationPrivileges[i])); + } + String[] rdRunAs = rd.getRunAs(); if (rdRunAs != null && rdRunAs.length > 0) { this.runAs(new Privilege(Sets.newHashSet(rdRunAs), rdRunAs)); } } + public Builder cluster(Set privilegeNames, Iterable conditionalClusterPrivileges) { + List clusterPermissions = new ArrayList<>(); + if (privilegeNames.isEmpty() == false) { + clusterPermissions.add(new ClusterPermission.SimpleClusterPermission(ClusterPrivilege.get(privilegeNames))); + } + for (ConditionalClusterPrivilege ccp : conditionalClusterPrivileges) { + clusterPermissions.add(new ClusterPermission.ConditionalClusterPermission(ccp)); + } + if (clusterPermissions.isEmpty()) { + this.cluster = ClusterPermission.SimpleClusterPermission.NONE; + } else if (clusterPermissions.size() == 1) { + this.cluster = clusterPermissions.get(0); + } else { + this.cluster = new ClusterPermission.CompositeClusterPermission(clusterPermissions); + } + return this; + } + + /** + * @deprecated Use {@link #cluster(Set, Iterable)} + */ + @Deprecated public Builder cluster(ClusterPrivilege privilege) { - cluster = new ClusterPermission(privilege); + cluster = new ClusterPermission.SimpleClusterPermission(privilege); return this; } @@ -129,10 +164,17 @@ public Builder add(FieldPermissions fieldPermissions, Set query, return this; } + public 
Builder addApplicationPrivilege(ApplicationPrivilege privilege, Set resources) { + applicationPrivs.add(new Tuple<>(privilege, resources)); + return this; + } + public Role build() { IndicesPermission indices = groups.isEmpty() ? IndicesPermission.NONE : - new IndicesPermission(groups.toArray(new IndicesPermission.Group[groups.size()])); - return new Role(names, cluster, indices, runAs); + new IndicesPermission(groups.toArray(new IndicesPermission.Group[groups.size()])); + final ApplicationPermission applicationPermission + = applicationPrivs.isEmpty() ? ApplicationPermission.NONE : new ApplicationPermission(applicationPrivs); + return new Role(names, cluster, indices, applicationPermission, runAs); } static List convertFromIndicesPrivileges(RoleDescriptor.IndicesPrivileges[] indicesPrivileges, @@ -144,16 +186,24 @@ static List convertFromIndicesPrivileges(RoleDescriptor fieldPermissions = fieldPermissionsCache.getFieldPermissions(privilege.getGrantedFields(), privilege.getDeniedFields()); } else { fieldPermissions = new FieldPermissions( - new FieldPermissionsDefinition(privilege.getGrantedFields(), privilege.getDeniedFields())); + new FieldPermissionsDefinition(privilege.getGrantedFields(), privilege.getDeniedFields())); } final Set query = privilege.getQuery() == null ? null : Collections.singleton(privilege.getQuery()); list.add(new IndicesPermission.Group(IndexPrivilege.get(Sets.newHashSet(privilege.getPrivileges())), - fieldPermissions, - query, - privilege.getIndices())); + fieldPermissions, + query, + privilege.getIndices())); } return list; } + + static Tuple> convertApplicationPrivilege(String role, int index, + RoleDescriptor.ApplicationResourcePrivileges arp) { + return new Tuple<>(new ApplicationPrivilege(arp.getApplication(), + "role." + role.replaceAll("[^a-zA-Z0-9]", "") + "." + index, + arp.getPrivileges() + ), Sets.newHashSet(arp.getResources())); + } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilege.java new file mode 100644 index 0000000000000..13db17a63bb0d --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilege.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.authz.privilege; + +import org.elasticsearch.common.Strings; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Function; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +/** + * An application privilege has an application name (e.g. {@code "my-app"}) that identifies an application (that exists + * outside of elasticsearch), a privilege name (e.g. {@code "admin}) that is meaningful to that application, and one or + * more "action patterns" (e.g {@code "admin/user/*", "admin/team/*"}). + * Action patterns must contain at least one special character from ({@code /}, {@code :}, {@code *}) to distinguish them + * from privilege names. 
+ * The action patterns are entirely optional - many applications will find that simple "privilege names" are sufficient, but + * they allow applications to define high-level abstract privileges that map to multiple low-level capabilities. + */ +public final class ApplicationPrivilege extends Privilege { + + private static final Pattern VALID_APPLICATION_PREFIX = Pattern.compile("^[a-z][A-Za-z0-9]*$"); + private static final Pattern WHITESPACE = Pattern.compile("[\\v\\h]"); + private static final Pattern VALID_NAME = Pattern.compile("^[a-z][a-zA-Z0-9_.-]*$"); + + /** + * A name or action must be composed of printable, visible ASCII characters. + * That is: letters, numbers & symbols, but no whitespace. + */ + private static final Pattern VALID_NAME_OR_ACTION = Pattern.compile("^\\p{Graph}*$"); + + public static final Function<String, ApplicationPrivilege> NONE = app -> new ApplicationPrivilege(app, "none", new String[0]); + + private final String application; + private final String[] patterns; + + public ApplicationPrivilege(String application, String privilegeName, String... patterns) { + this(application, Collections.singleton(privilegeName), patterns); + } + + public ApplicationPrivilege(String application, Set<String> name, String... patterns) { + super(name, patterns); + this.application = application; + this.patterns = patterns; + } + + public String getApplication() { + return application; + } + + // Package level for testing + String[] getPatterns() { + return patterns; + } + + /** + * Validates that the provided application name is valid, and throws an exception otherwise + * + * @throws IllegalArgumentException if the name is not valid + */ + public static void validateApplicationName(String application) { + validateApplicationName(application, false); + } + + /** + * Validates that the provided name is a valid application name, or a wildcard pattern for an application, and throws an exception otherwise + * + * @throws IllegalArgumentException if the name is not valid + */ + public static void validateApplicationNameOrWildcard(String application) { + validateApplicationName(application, true); + } + + /** + * Validates that an application name matches the following rules: + * - consists of a "prefix", optionally followed by either "-" or "_" and a suffix + * - the prefix must begin with a lowercase ASCII letter + * - the prefix must only contain ASCII letters or digits + * - the prefix must be at least 3 characters long + * - the suffix must only contain {@link Strings#validFileName valid filename} characters + * - no part of the name may contain whitespace + * If {@code allowWildcard} is true, then names that end with a '*' and would otherwise match a valid + * application name are also accepted.
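To make the naming rules above concrete, here is a small sketch (assuming x-pack core is on the classpath) that exercises the two public validation entry points with invented names. Per the implementation in the next hunk, a wildcard is only accepted via `validateApplicationNameOrWildcard` and must be trailing:

```java
import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege;

public class ApplicationNameExamples {
    public static void main(String[] args) {
        ApplicationPrivilege.validateApplicationName("myapp");            // ok: lowercase prefix, 3+ chars
        ApplicationPrivilege.validateApplicationName("myapp-dashboard");  // ok: "-" plus a filename-safe suffix
        ApplicationPrivilege.validateApplicationNameOrWildcard("myapp*"); // ok: trailing wildcard on the prefix
        ApplicationPrivilege.validateApplicationNameOrWildcard("*");      // ok: matches every application

        expectFailure(() -> ApplicationPrivilege.validateApplicationName("ap"));     // prefix shorter than 3
        expectFailure(() -> ApplicationPrivilege.validateApplicationName("MyApp"));  // prefix must start lowercase
        expectFailure(() -> ApplicationPrivilege.validateApplicationName("my app")); // whitespace is rejected
        expectFailure(() -> ApplicationPrivilege.validateApplicationName("myapp*")); // '*' needs ...OrWildcard
    }

    static void expectFailure(Runnable validation) {
        try {
            validation.run();
            throw new AssertionError("expected an IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}
```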
+ */ + private static void validateApplicationName(String application, boolean allowWildcard) { + if (Strings.isEmpty(application)) { + throw new IllegalArgumentException("Application names cannot be blank"); + } + final int asterisk = application.indexOf('*'); + if (asterisk != -1) { + if (allowWildcard == false) { + throw new IllegalArgumentException("Application names may not contain '*' (found '" + application + "')"); + } + if (application.equals("*")) { + // this is allowed and short-circuiting here makes the later validation simpler + return; + } + if (asterisk != application.length() - 1) { + throw new IllegalArgumentException("Application name patterns only support trailing wildcards (found '" + application + + "')"); + } + } + if (WHITESPACE.matcher(application).find()) { + throw new IllegalArgumentException("Application names may not contain whitespace (found '" + application + "')"); + } + + final String[] parts = application.split("[_-]", 2); + String prefix = parts[0]; + if (prefix.endsWith("*")) { + prefix = prefix.substring(0, prefix.length() - 1); + } + if (VALID_APPLICATION_PREFIX.matcher(prefix).matches() == false) { + throw new IllegalArgumentException("An application name prefix must match the pattern " + VALID_APPLICATION_PREFIX.pattern() + + " (found '" + prefix + "')"); + } + if (prefix.length() < 3 && asterisk == -1) { + throw new IllegalArgumentException("An application name prefix must be at least 3 characters long (found '" + prefix + "')"); + } + + if (parts.length > 1) { + final String suffix = parts[1]; + if (Strings.validFileName(suffix) == false) { + throw new IllegalArgumentException("An application name suffix may not contain any of the characters '" + + Strings.collectionToDelimitedString(Strings.INVALID_FILENAME_CHARS, "") + "' (found '" + suffix + "')"); + } + } + } + + /** + * Validates that the provided privilege name is valid, and throws an exception otherwise + * + * @throws IllegalArgumentException if the name is not valid + */ + public static void validatePrivilegeName(String name) { + if (isValidPrivilegeName(name) == false) { + throw new IllegalArgumentException("Application privilege names must match the pattern " + VALID_NAME.pattern() + + " (found '" + name + "')"); + } + } + + private static boolean isValidPrivilegeName(String name) { + return VALID_NAME.matcher(name).matches(); + } + + /** + * Validates that the provided name is a valid privilege name or action name, and throws an exception otherwise + * + * @throws IllegalArgumentException if the name is not valid + */ + public static void validatePrivilegeOrActionName(String name) { + if (VALID_NAME_OR_ACTION.matcher(name).matches() == false) { + throw new IllegalArgumentException("Application privilege names and actions must match the pattern " + + VALID_NAME_OR_ACTION.pattern() + " (found '" + name + "')"); + } + } + + /** + * Finds or creates an application privilege with the provided names. + * Each element in {@code name} may be the name of a stored privilege (to be resolved from {@code stored}), or a bespoke action pattern.
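The three name spaces above (application names, privilege names, action names) are easy to mix up, so here is a short sketch of the privilege-vs-action distinction using the validation methods from this hunk; all names are invented:

```java
import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege;

public class PrivilegeNameExamples {
    public static void main(String[] args) {
        // Privilege names: lowercase first character, then letters, digits, '_', '.' or '-'.
        ApplicationPrivilege.validatePrivilegeName("read");
        ApplicationPrivilege.validatePrivilegeName("admin.user-read");

        // Action patterns are not valid privilege names (they contain '/', ':' or '*'),
        // but pass wherever a privilege *or* action is acceptable.
        ApplicationPrivilege.validatePrivilegeOrActionName("admin/user/*");
        ApplicationPrivilege.validatePrivilegeOrActionName("data:read");

        try {
            ApplicationPrivilege.validatePrivilegeName("admin/user/*"); // '/' and '*' are not allowed here
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}
```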
+ */ + public static ApplicationPrivilege get(String application, Set name, Collection stored) { + if (name.isEmpty()) { + return NONE.apply(application); + } else { + Map lookup = stored.stream() + .filter(apd -> apd.getApplication().equals(application)) + .collect(Collectors.toMap(ApplicationPrivilegeDescriptor::getName, Function.identity())); + return resolve(application, name, lookup); + } + } + + private static ApplicationPrivilege resolve(String application, Set names, Map lookup) { + final int size = names.size(); + if (size == 0) { + throw new IllegalArgumentException("empty set should not be used"); + } + + Set actions = new HashSet<>(); + Set patterns = new HashSet<>(); + for (String name : names) { + if (isValidPrivilegeName(name)) { + ApplicationPrivilegeDescriptor descriptor = lookup.get(name); + if (descriptor != null) { + patterns.addAll(descriptor.getActions()); + } + } else { + actions.add(name); + } + } + + patterns.addAll(actions); + return new ApplicationPrivilege(application, names, patterns.toArray(new String[patterns.size()])); + } + + @Override + public String toString() { + return application + ":" + super.toString() + "(" + Strings.arrayToCommaDelimitedString(patterns) + ")"; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + Objects.hashCode(application); + result = 31 * result + Arrays.hashCode(patterns); + return result; + } + + @Override + public boolean equals(Object o) { + return super.equals(o) + && Objects.equals(this.application, ((ApplicationPrivilege) o).application) + && Arrays.equals(this.patterns, ((ApplicationPrivilege) o).patterns); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeDescriptor.java new file mode 100644 index 0000000000000..85d6aad3e3560 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeDescriptor.java @@ -0,0 +1,194 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.security.authz.privilege; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +/** + * An {@code ApplicationPrivilegeDescriptor} is a representation of a stored {@link ApplicationPrivilege}. + * A user (via a role) can be granted an application privilege by name (e.g. ("myapp", "read"). + * In general, this privilege name will correspond to a pre-defined {@link ApplicationPrivilegeDescriptor}, which then + * is used to determine the set of actions granted by the privilege. 
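The `get`/`resolve` pair above and the `ApplicationPrivilegeDescriptor` class introduced in the next hunk cooperate: names that look like privilege names are expanded into their stored actions, while anything else is kept as a bespoke action pattern. A hedged sketch with invented values, assuming x-pack core on the classpath:

```java
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege;
import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor;

public class ResolvePrivilegeExample {
    public static void main(String[] args) {
        // A stored privilege: "read" on "myapp" expands to two action patterns.
        ApplicationPrivilegeDescriptor read = new ApplicationPrivilegeDescriptor(
                "myapp", "read",
                new HashSet<>(Arrays.asList("data/read/*", "action:login")),
                Collections.emptyMap());

        // Resolve a mix of a stored name ("read") and a bespoke action pattern.
        Set<String> names = new HashSet<>(Arrays.asList("read", "data/write/blog"));
        ApplicationPrivilege privilege =
                ApplicationPrivilege.get("myapp", names, Collections.singletonList(read));

        // "read" contributed the descriptor's actions; the bespoke pattern is kept as-is.
        System.out.println(privilege);
    }
}
```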
+ */ +public class ApplicationPrivilegeDescriptor implements ToXContentObject, Writeable { + + public static final String DOC_TYPE_VALUE = "application-privilege"; + + private static final ObjectParser PARSER = new ObjectParser<>(DOC_TYPE_VALUE, Builder::new); + + static { + PARSER.declareString(Builder::applicationName, Fields.APPLICATION); + PARSER.declareString(Builder::privilegeName, Fields.NAME); + PARSER.declareStringArray(Builder::actions, Fields.ACTIONS); + PARSER.declareObject(Builder::metadata, (parser, context) -> parser.map(), Fields.METADATA); + PARSER.declareField((parser, builder, allowType) -> builder.type(parser.text(), allowType), Fields.TYPE, + ObjectParser.ValueType.STRING); + } + + private String application; + private String name; + private Set actions; + private Map metadata; + + public ApplicationPrivilegeDescriptor(String application, String name, Set actions, Map metadata) { + this.application = Objects.requireNonNull(application); + this.name = Objects.requireNonNull(name); + this.actions = Collections.unmodifiableSet(actions); + this.metadata = Collections.unmodifiableMap(metadata); + } + + public ApplicationPrivilegeDescriptor(StreamInput input) throws IOException { + this.application = input.readString(); + this.name = input.readString(); + this.actions = Collections.unmodifiableSet(input.readSet(StreamInput::readString)); + this.metadata = Collections.unmodifiableMap(input.readMap()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(application); + out.writeString(name); + out.writeCollection(actions, StreamOutput::writeString); + out.writeMap(metadata); + } + + public String getApplication() { + return application; + } + + public String getName() { + return name; + } + + public Set getActions() { + return actions; + } + + public Map getMetadata() { + return metadata; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return toXContent(builder, false); + } + + public XContentBuilder toXContent(XContentBuilder builder, boolean includeTypeField) throws IOException { + builder.startObject() + .field(Fields.APPLICATION.getPreferredName(), application) + .field(Fields.NAME.getPreferredName(), name) + .field(Fields.ACTIONS.getPreferredName(), actions) + .field(Fields.METADATA.getPreferredName(), metadata); + if (includeTypeField) { + builder.field(Fields.TYPE.getPreferredName(), DOC_TYPE_VALUE); + } + return builder.endObject(); + } + + /** + * Construct a new {@link ApplicationPrivilegeDescriptor} from XContent. 
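For reference, this is the XContent shape that the `PARSER` declarations and `toXContent` above correspond to. The field values are invented, and the `type` field is only present when `includeTypeField` is true (it must then equal `application-privilege`):

```java
public class PrivilegeDocShape {
    public static void main(String[] args) {
        // The document shape handled by ApplicationPrivilegeDescriptor's parser.
        String json =
                "{\n" +
                "  \"application\": \"myapp\",\n" +
                "  \"name\": \"read\",\n" +
                "  \"actions\": [ \"data/read/*\", \"action:login\" ],\n" +
                "  \"metadata\": { \"description\": \"read access to myapp\" },\n" +
                "  \"type\": \"application-privilege\"\n" +
                "}";
        System.out.println(json);
    }
}
```

When `parse(...)` is given `defaultApplication`/`defaultName` (as the parse method in the next hunk does), the `application` and `name` fields may be omitted from the body.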
+ * + * @param defaultApplication The application name to use if none is specified in the XContent body + * @param defaultName The privilege name to use if none is specified in the XContent body + * @param allowType If true, accept a "type" field (for which the value must match {@link #DOC_TYPE_VALUE}); + */ + public static ApplicationPrivilegeDescriptor parse(XContentParser parser, String defaultApplication, String defaultName, + boolean allowType) throws IOException { + final Builder builder = PARSER.parse(parser, allowType); + if (builder.applicationName == null) { + builder.applicationName(defaultApplication); + } + if (builder.privilegeName == null) { + builder.privilegeName(defaultName); + } + return builder.build(); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final ApplicationPrivilegeDescriptor that = (ApplicationPrivilegeDescriptor) o; + return Objects.equals(this.application, that.application) && + Objects.equals(this.name, that.name) && + Objects.equals(this.actions, that.actions) && + Objects.equals(this.metadata, that.metadata); + } + + @Override + public int hashCode() { + return Objects.hash(application, name, actions, metadata); + } + + private static final class Builder { + private String applicationName; + private String privilegeName; + private Set actions = Collections.emptySet(); + private Map metadata = Collections.emptyMap(); + + private Builder applicationName(String applicationName) { + this.applicationName = applicationName; + return this; + } + + private Builder privilegeName(String privilegeName) { + this.privilegeName = privilegeName; + return this; + } + + private Builder actions(Collection actions) { + this.actions = new HashSet<>(actions); + return this; + } + + private Builder metadata(Map metadata) { + this.metadata = metadata; + return this; + } + + private Builder type(String type, boolean allowed) { + if (allowed == false) { + throw new IllegalStateException("Field " + Fields.TYPE.getPreferredName() + " cannot be specified here"); + } + if (ApplicationPrivilegeDescriptor.DOC_TYPE_VALUE.equals(type) == false) { + throw new IllegalStateException("XContent has wrong " + Fields.TYPE.getPreferredName() + " field " + type); + } + return this; + } + + private ApplicationPrivilegeDescriptor build() { + return new ApplicationPrivilegeDescriptor(applicationName, privilegeName, actions, metadata); + } + } + + public interface Fields { + ParseField APPLICATION = new ParseField("application"); + ParseField NAME = new ParseField("name"); + ParseField ACTIONS = new ParseField("actions"); + ParseField METADATA = new ParseField("metadata"); + ParseField TYPE = new ParseField("type"); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivilege.java new file mode 100644 index 0000000000000..dd89c2bda705d --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivilege.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.security.authz.privilege; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.transport.TransportRequest; + +import java.io.IOException; +import java.util.Collection; +import java.util.function.Predicate; + +/** + * A ConditionalClusterPrivilege is a composition of a {@link ClusterPrivilege} (that determines which actions may be executed) + * with a {@link Predicate} for a {@link TransportRequest} (that determines which requests may be executed). + * A given execution of an action is considered to be permitted if both the action and the request are permitted. + */ +public interface ConditionalClusterPrivilege extends NamedWriteable, ToXContentFragment { + + /** + * The category under which this privilege should be rendered when output as XContent. + */ + Category getCategory(); + + /** + * The action-level privilege that is required by this conditional privilege. + */ + ClusterPrivilege getPrivilege(); + + /** + * The request-level privilege (as a {@link Predicate}) that is required by this conditional privilege. + */ + Predicate<TransportRequest> getRequestPredicate(); + + /** + * A {@link ConditionalClusterPrivilege} should generate a fragment of {@code XContent}, which consists of + * a single field name, followed by its value (which may be an object, an array, or a simple value). + */ + @Override + XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException; + + /** + * Categories exist to segment privileges for the purposes of rendering to XContent. + * {@link ConditionalClusterPrivileges#toXContent(XContentBuilder, Params, Collection)} builds one XContent + * object for a collection of {@link ConditionalClusterPrivilege} instances, with the top-level fields built + * from the categories. + */ + enum Category { + APPLICATION(new ParseField("application")); + + public final ParseField field; + + Category(ParseField field) { + this.field = field; + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivileges.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivileges.java new file mode 100644 index 0000000000000..c068c77781b14 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivileges.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License.
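Taken together with the `ClusterPermission` changes earlier in this diff, the model is: a simple permission checks only the action, a conditional permission checks the action and the request, and a composite grants if any child grants. A self-contained sketch with stand-in types (plain `String`/`Object` instead of the real action and `TransportRequest` types):

```java
import java.util.Arrays;
import java.util.Collection;
import java.util.function.Predicate;

public class ClusterCheckSketch {
    // Stand-in for ClusterPermission#check(String, TransportRequest).
    interface Permission {
        boolean check(String action, Object request);
    }

    public static void main(String[] args) {
        // "Simple": an action predicate only; the request is ignored.
        Permission monitor = (action, request) -> action.startsWith("cluster:monitor/");

        // "Conditional": action predicate AND request predicate must both pass.
        Predicate<Object> kibanaAppsOnly =
                request -> request instanceof String && ((String) request).startsWith("kibana-");
        Permission managePrivileges = (action, request) ->
                action.startsWith("cluster:admin/xpack/security/privilege/") && kibanaAppsOnly.test(request);

        // "Composite": grant if any child grants.
        Collection<Permission> children = Arrays.asList(monitor, managePrivileges);
        Permission composite = (action, request) -> children.stream().anyMatch(p -> p.check(action, request));

        System.out.println(composite.check("cluster:admin/xpack/security/privilege/put", "kibana-reports")); // true
        System.out.println(composite.check("cluster:admin/xpack/security/privilege/put", "other-app"));      // false
    }
}
```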
+ */ + +package org.elasticsearch.xpack.core.security.authz.privilege; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParseException; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xpack.core.security.action.privilege.ApplicationPrivilegesRequest; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege.Category; +import org.elasticsearch.xpack.core.security.support.Automatons; +import org.elasticsearch.xpack.core.security.xcontent.XContentUtils; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; +import java.util.function.Predicate; + +/** + * Static utility class for working with {@link ConditionalClusterPrivilege} instances + */ +public final class ConditionalClusterPrivileges { + + public static final ConditionalClusterPrivilege[] EMPTY_ARRAY = new ConditionalClusterPrivilege[0]; + + private ConditionalClusterPrivileges() { + } + + /** + * Utility method to read an array of {@link ConditionalClusterPrivilege} objects from a {@link StreamInput} + */ + public static ConditionalClusterPrivilege[] readArray(StreamInput in) throws IOException { + return in.readArray(in1 -> + in1.readNamedWriteable(ConditionalClusterPrivilege.class), ConditionalClusterPrivilege[]::new); + } + + /** + * Utility method to write an array of {@link ConditionalClusterPrivilege} objects to a {@link StreamOutput} + */ + public static void writeArray(StreamOutput out, ConditionalClusterPrivilege[] privileges) throws IOException { + out.writeArray((out1, value) -> out1.writeNamedWriteable(value), privileges); + } + + /** + * Writes a single object value to the {@code builder} that contains each of the provided privileges. + * The privileges are grouped according to their {@link ConditionalClusterPrivilege#getCategory() categories} + */ + public static XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params, + Collection privileges) throws IOException { + builder.startObject(); + for (Category category : Category.values()) { + builder.startObject(category.field.getPreferredName()); + for (ConditionalClusterPrivilege privilege : privileges) { + if (category == privilege.getCategory()) { + privilege.toXContent(builder, params); + } + } + builder.endObject(); + } + return builder.endObject(); + } + + /** + * Read a list of privileges from the parser. 
The parser should be positioned at the + * {@link XContentParser.Token#START_OBJECT} token for the privileges value + */ + public static List parse(XContentParser parser) throws IOException { + List privileges = new ArrayList<>(); + + expectedToken(parser.currentToken(), parser, XContentParser.Token.START_OBJECT); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + expectedToken(parser.currentToken(), parser, XContentParser.Token.FIELD_NAME); + + expectFieldName(parser, Category.APPLICATION.field); + expectedToken(parser.nextToken(), parser, XContentParser.Token.START_OBJECT); + expectedToken(parser.nextToken(), parser, XContentParser.Token.FIELD_NAME); + + expectFieldName(parser, ManageApplicationPrivileges.Fields.MANAGE); + privileges.add(ManageApplicationPrivileges.parse(parser)); + expectedToken(parser.nextToken(), parser, XContentParser.Token.END_OBJECT); + } + + return privileges; + } + + private static void expectedToken(XContentParser.Token read, XContentParser parser, XContentParser.Token expected) { + if (read != expected) { + throw new XContentParseException(parser.getTokenLocation(), + "failed to parse privilege. expected [" + expected + "] but found [" + read + "] instead"); + } + } + + private static void expectFieldName(XContentParser parser, ParseField... fields) throws IOException { + final String fieldName = parser.currentName(); + if (Arrays.stream(fields).anyMatch(pf -> pf.match(fieldName, parser.getDeprecationHandler())) == false) { + throw new XContentParseException(parser.getTokenLocation(), + "failed to parse privilege. expected " + (fields.length == 1 ? "field name" : "one of") + " [" + + Strings.arrayToCommaDelimitedString(fields) + "] but found [" + fieldName + "] instead"); + } + } + + /** + * The {@code ManageApplicationPrivileges} privilege is a {@link ConditionalClusterPrivilege} that grants the + * ability to execute actions related to the management of application privileges (Get, Put, Delete) for a subset + * of applications (identified by a wildcard-aware application-name). 
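To tie the parsing code here back to `RoleDescriptor.parse` at the top of this diff, this is approximately the role JSON that exercises both the `applications` array and the `global` object. The names are invented, and per this class only `application.manage.applications` is currently recognised under `global`:

```java
public class RoleWithGlobalPrivileges {
    public static void main(String[] args) {
        String role =
                "{\n" +
                "  \"cluster\": [ \"monitor\" ],\n" +
                "  \"applications\": [\n" +
                "    { \"application\": \"myapp\", \"privileges\": [ \"read\" ], \"resources\": [ \"data/*\" ] }\n" +
                "  ],\n" +
                "  \"global\": {\n" +
                "    \"application\": { \"manage\": { \"applications\": [ \"kibana-*\" ] } }\n" +
                "  }\n" +
                "}";
        System.out.println(role);
    }
}
```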
+ */ + public static class ManageApplicationPrivileges implements ConditionalClusterPrivilege { + + private static final ClusterPrivilege PRIVILEGE = ClusterPrivilege.get( + Collections.singleton("cluster:admin/xpack/security/privilege/*") + ); + public static final String WRITEABLE_NAME = "manage-application-privileges"; + + private final Set applicationNames; + private final Predicate applicationPredicate; + private final Predicate requestPredicate; + + public ManageApplicationPrivileges(Set applicationNames) { + this.applicationNames = Collections.unmodifiableSet(applicationNames); + this.applicationPredicate = Automatons.predicate(applicationNames); + this.requestPredicate = request -> { + if (request instanceof ApplicationPrivilegesRequest) { + final ApplicationPrivilegesRequest privRequest = (ApplicationPrivilegesRequest) request; + return privRequest.getApplicationNames().stream().allMatch(application -> applicationPredicate.test(application)); + } + return false; + }; + } + + @Override + public Category getCategory() { + return Category.APPLICATION; + } + + @Override + public ClusterPrivilege getPrivilege() { + return PRIVILEGE; + } + + @Override + public Predicate getRequestPredicate() { + return this.requestPredicate; + } + + public Collection getApplicationNames() { + return Collections.unmodifiableCollection(this.applicationNames); + } + + @Override + public String getWriteableName() { + return WRITEABLE_NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(this.applicationNames, StreamOutput::writeString); + } + + public static ManageApplicationPrivileges createFrom(StreamInput in) throws IOException { + final Set applications = in.readSet(StreamInput::readString); + return new ManageApplicationPrivileges(applications); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.field(Fields.MANAGE.getPreferredName(), + Collections.singletonMap(Fields.APPLICATIONS.getPreferredName(), applicationNames) + ); + } + + public static ManageApplicationPrivileges parse(XContentParser parser) throws IOException { + expectedToken(parser.currentToken(), parser, XContentParser.Token.FIELD_NAME); + expectFieldName(parser, Fields.MANAGE); + expectedToken(parser.nextToken(), parser, XContentParser.Token.START_OBJECT); + expectedToken(parser.nextToken(), parser, XContentParser.Token.FIELD_NAME); + expectFieldName(parser, Fields.APPLICATIONS); + expectedToken(parser.nextToken(), parser, XContentParser.Token.START_ARRAY); + final String[] applications = XContentUtils.readStringArray(parser, false); + expectedToken(parser.nextToken(), parser, XContentParser.Token.END_OBJECT); + return new ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList(applications))); + } + + @Override + public String toString() { + return "{" + getCategory() + ":" + Fields.MANAGE.getPreferredName() + ":" + Fields.APPLICATIONS.getPreferredName() + "=" + + Strings.collectionToDelimitedString(applicationNames, ",") + "}"; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final ManageApplicationPrivileges that = (ManageApplicationPrivileges) o; + return this.applicationNames.equals(that.applicationNames); + } + + @Override + public int hashCode() { + return applicationNames.hashCode(); + } + + private interface Fields { + ParseField MANAGE = new ParseField("manage"); + ParseField 
APPLICATIONS = new ParseField("applications"); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index aeb448faa9c1c..0c59343636553 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -9,6 +9,8 @@ import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkAction; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.permission.Role; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges.ManageApplicationPrivileges; import org.elasticsearch.xpack.core.security.support.MetadataUtils; import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.UsernamesField; @@ -27,8 +29,11 @@ public class ReservedRolesStore { new String[] { "all" }, new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").build()}, - new String[] { "*" }, - MetadataUtils.DEFAULT_RESERVED_METADATA); + new RoleDescriptor.ApplicationResourcePrivileges[] { + RoleDescriptor.ApplicationResourcePrivileges.builder().application("*").privileges("*").resources("*").build() + }, + null, new String[] { "*" }, + MetadataUtils.DEFAULT_RESERVED_METADATA, Collections.emptyMap()); public static final Role SUPERUSER_ROLE = Role.builder(SUPERUSER_ROLE_DESCRIPTOR, null).build(); private static final Map RESERVED_ROLES = initializeReservedRoles(); @@ -43,7 +48,11 @@ private static Map initializeReservedRoles() { MetadataUtils.DEFAULT_RESERVED_METADATA)) .put("kibana_user", new RoleDescriptor("kibana_user", null, new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices(".kibana*").privileges("manage", "read", "index", "delete") - .build() }, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) + .build() }, new RoleDescriptor.ApplicationResourcePrivileges[] { + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("kibana-.kibana").resources("*").privileges("all").build() }, + null, null, + MetadataUtils.DEFAULT_RESERVED_METADATA, null)) .put("monitoring_user", new RoleDescriptor("monitoring_user", new String[] { "cluster:monitor/main" }, new RoleDescriptor.IndicesPrivileges[] { @@ -70,13 +79,19 @@ private static Map initializeReservedRoles() { "kibana_dashboard_only_user", null, new RoleDescriptor.IndicesPrivileges[] { - RoleDescriptor.IndicesPrivileges.builder() - .indices(".kibana*").privileges("read", "view_index_metadata").build() + RoleDescriptor.IndicesPrivileges.builder() + .indices(".kibana*").privileges("read", "view_index_metadata").build() }, - null, - MetadataUtils.DEFAULT_RESERVED_METADATA)) + new RoleDescriptor.ApplicationResourcePrivileges[] { + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("kibana-.kibana").resources("*").privileges("read").build() }, + null, null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null)) .put(KibanaUser.ROLE_NAME, new RoleDescriptor(KibanaUser.ROLE_NAME, - new String[] { "monitor", "manage_index_templates", MonitoringBulkAction.NAME, "manage_saml" }, + new 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java
index 26c35db1fc92b..e1d3a2db8e952 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java
@@ -10,6 +10,12 @@
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesAction;
+import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesRequestBuilder;
+import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesAction;
+import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesRequestBuilder;
+import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction;
+import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesRequestBuilder;
 import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheAction;
 import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheRequest;
 import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheRequestBuilder;
@@ -100,7 +106,6 @@ public SecurityClient(ElasticsearchClient client) {
      * Clears the realm caches. It's possible to clear all user entries from all realms in the cluster or alternatively
      * select the realms (by their unique names) and/or users (by their usernames) that should be evicted.
      */
-    @SuppressWarnings("unchecked")
     public ClearRealmCacheRequestBuilder prepareClearRealmCache() {
         return new ClearRealmCacheRequestBuilder(client);
     }
@@ -109,7 +114,6 @@ public ClearRealmCacheRequestBuilder prepareClearRealmCache() {
      * Clears the realm caches. It's possible to clear all user entries from all realms in the cluster or alternatively
      * select the realms (by their unique names) and/or users (by their usernames) that should be evicted.
      */
-    @SuppressWarnings("unchecked")
     public void clearRealmCache(ClearRealmCacheRequest request, ActionListener<ClearRealmCacheResponse> listener) {
         client.execute(ClearRealmCacheAction.INSTANCE, request, listener);
     }
@@ -118,7 +122,6 @@ public void clearRealmCache(ClearRealmCacheRequest request, ActionListener<Clea
      * Clears the realm caches. It's possible to clear all user entries from all realms in the cluster or alternatively
      * select the realms (by their unique names) and/or users (by their usernames) that should be evicted.
      */
-    @SuppressWarnings("unchecked")
     public ActionFuture<ClearRealmCacheResponse> clearRealmCache(ClearRealmCacheRequest request) {
         return client.execute(ClearRealmCacheAction.INSTANCE, request);
     }
@@ -170,7 +173,9 @@ public void hasPrivileges(HasPrivilegesRequest request, ActionListener<HasPrivil
         client.execute(PutRoleAction.INSTANCE, request, listener);
     }
 
-    /** Role Mappings */
+    /**
+     * Role Mappings
+     */
     public GetRoleMappingsRequestBuilder prepareGetRoleMappings(String... names) {
         return new GetRoleMappingsRequestBuilder(client, GetRoleMappingsAction.INSTANCE)
@@ -278,6 +287,27 @@ public DeleteRoleMappingRequestBuilder prepareDeleteRoleMapping(String name) {
                 .name(name);
     }
 
+    /* -- Application Privileges -- */
+    public GetPrivilegesRequestBuilder prepareGetPrivileges(String applicationName, String[] privileges) {
+        return new GetPrivilegesRequestBuilder(client, GetPrivilegesAction.INSTANCE).application(applicationName).privileges(privileges);
+    }
+
+    public PutPrivilegesRequestBuilder preparePutPrivilege(String applicationName, String privilegeName,
+                                                           BytesReference bytesReference, XContentType xContentType) throws IOException {
+        return new PutPrivilegesRequestBuilder(client, PutPrivilegesAction.INSTANCE)
+                .source(applicationName, privilegeName, bytesReference, xContentType);
+    }
+
+    public PutPrivilegesRequestBuilder preparePutPrivileges(BytesReference bytesReference, XContentType xContentType) throws IOException {
+        return new PutPrivilegesRequestBuilder(client, PutPrivilegesAction.INSTANCE).source(bytesReference, xContentType);
+    }
+
+    public DeletePrivilegesRequestBuilder prepareDeletePrivileges(String applicationName, String[] privileges) {
+        return new DeletePrivilegesRequestBuilder(client, DeletePrivilegesAction.INSTANCE)
+                .application(applicationName)
+                .privileges(privileges);
+    }
+
     public CreateTokenRequestBuilder prepareCreateToken() {
         return new CreateTokenRequestBuilder(client, CreateTokenAction.INSTANCE);
     }
@@ -301,7 +331,7 @@ public SamlAuthenticateRequestBuilder prepareSamlAuthenticate(byte[] xmlContent,
         return builder;
     }
 
-    public void samlAuthenticate(SamlAuthenticateRequest request, ActionListener< SamlAuthenticateResponse> listener) {
+    public void samlAuthenticate(SamlAuthenticateRequest request, ActionListener<SamlAuthenticateResponse> listener) {
         client.execute(SamlAuthenticateAction.INSTANCE, request, listener);
     }
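A usage sketch for the new application-privilege helpers on `SecurityClient`. It assumes an already-connected `client` against a cluster with security enabled; the application and privilege names are made up, and `get()` blocks on the response as with other request builders:

```java
import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesResponse;
import org.elasticsearch.xpack.core.security.client.SecurityClient;

class PrivilegeAdminSketch {
    static void listAndDelete(Client client) {
        SecurityClient securityClient = new SecurityClient(client);

        // Fetch the "read" and "write" privilege definitions of the "myapp" application.
        GetPrivilegesResponse response =
                securityClient.prepareGetPrivileges("myapp", new String[] { "read", "write" }).get();
        System.out.println("found " + response.privileges().length + " privileges");

        // Remove a privilege definition that is no longer needed.
        securityClient.prepareDeletePrivileges("myapp", new String[] { "obsolete" }).get();
    }
}
```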
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java
index 36e0b8ddb009b..b11867f836507 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java
@@ -9,6 +9,8 @@
 import org.apache.lucene.util.automaton.Automaton;
 import org.apache.lucene.util.automaton.CharacterRunAutomaton;
 import org.apache.lucene.util.automaton.RegExp;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -25,9 +27,15 @@
 
 public final class Automatons {
 
+    public static final Setting<Integer> MAX_DETERMINIZED_STATES_SETTING =
+        Setting.intSetting("xpack.security.automata.max_determinized_states", 100000, DEFAULT_MAX_DETERMINIZED_STATES,
+            Setting.Property.NodeScope);
     public static final Automaton EMPTY = Automata.makeEmpty();
     public static final Automaton MATCH_ALL = Automata.makeAnyString();
 
+    // this value is not final since we allow it to be set at runtime
+    private static int maxDeterminizedStates = 100000;
+
     static final char WILDCARD_STRING = '*';     // String equality with support for wildcards
     static final char WILDCARD_CHAR = '?';       // Char equality with support for wildcards
     static final char WILDCARD_ESCAPE = '\\';    // Escape character
@@ -49,13 +57,12 @@ public static Automaton patterns(Collection<String> patterns) {
         if (patterns.isEmpty()) {
             return EMPTY;
         }
-        Automaton automaton = null;
+        List<Automaton> automata = new ArrayList<>(patterns.size());
         for (String pattern : patterns) {
-            final Automaton patternAutomaton = minimize(pattern(pattern), DEFAULT_MAX_DETERMINIZED_STATES);
-            automaton = automaton == null ? patternAutomaton : unionAndMinimize(Arrays.asList(automaton, patternAutomaton));
+            final Automaton patternAutomaton = pattern(pattern);
+            automata.add(patternAutomaton);
         }
-        // the automaton is always minimized and deterministic
-        return automaton;
+        return unionAndMinimize(automata);
     }
 
     /**
@@ -111,12 +118,12 @@ static Automaton wildcard(String text) {
 
     public static Automaton unionAndMinimize(Collection<Automaton> automata) {
         Automaton res = union(automata);
-        return minimize(res, DEFAULT_MAX_DETERMINIZED_STATES);
+        return minimize(res, maxDeterminizedStates);
     }
 
     public static Automaton minusAndMinimize(Automaton a1, Automaton a2) {
-        Automaton res = minus(a1, a2, DEFAULT_MAX_DETERMINIZED_STATES);
-        return minimize(res, DEFAULT_MAX_DETERMINIZED_STATES);
+        Automaton res = minus(a1, a2, maxDeterminizedStates);
+        return minimize(res, maxDeterminizedStates);
     }
 
     public static Predicate<String> predicate(String... patterns) {
@@ -131,8 +138,17 @@ public static Predicate<String> predicate(Automaton automaton) {
         return predicate(automaton, "Predicate for " + automaton);
     }
 
+    public static void updateMaxDeterminizedStates(Settings settings) {
+        maxDeterminizedStates = MAX_DETERMINIZED_STATES_SETTING.get(settings);
+    }
+
+    // accessor for testing
+    static int getMaxDeterminizedStates() {
+        return maxDeterminizedStates;
+    }
+
     private static Predicate<String> predicate(Automaton automaton, final String toString) {
-        CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, DEFAULT_MAX_DETERMINIZED_STATES);
+        CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, maxDeterminizedStates);
         return new Predicate<String>() {
             @Override
             public boolean test(String s) {
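With this change the union of all patterns is minimized once, under a budget that can be tuned through the new node setting instead of Lucene's hard-coded default. A small sketch of the observable behavior (the pattern syntax follows the wildcard constants declared above; the setting value here is arbitrary):

```java
import java.util.function.Predicate;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.core.security.support.Automatons;

class AutomatonBudgetSketch {
    public static void main(String[] args) {
        // Lower the determinization budget; a pathological pattern set that needs
        // more states than this should now fail fast instead of consuming memory.
        Automatons.updateMaxDeterminizedStates(Settings.builder()
                .put("xpack.security.automata.max_determinized_states", 10_000)
                .build());

        // "*" and "?" are the wildcard characters handled by Automatons.
        Predicate<String> predicate = Automatons.predicate("kibana-*", "logstash-?");
        System.out.println(predicate.test("kibana-.kibana")); // expected: true
        System.out.println(predicate.test("beats"));          // expected: false
    }
}
```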
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java
index 84a643ae72dae..89279f4ea31cf 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java
@@ -76,7 +76,6 @@ public String index() {
 
     /**
      * Sets the index.
*/ - @SuppressWarnings("unchecked") public final Request index(String index) { this.index = index; return this; diff --git a/x-pack/plugin/core/src/main/resources/monitoring-beats.json b/x-pack/plugin/core/src/main/resources/monitoring-beats.json index ed027387a49cc..07756ba2602f0 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-beats.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-beats.json @@ -37,6 +37,49 @@ }, "state": { "properties": { + "beat": { + "properties": { + "name": { + "type": "keyword" + } + } + }, + "host": { + "properties": { + "architecture": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "os": { + "properties": { + "build": { + "type": "keyword" + }, + "family": { + "type": "keyword" + }, + "platform": { + "type": "keyword" + }, + "version": { + "type": "keyword" + } + } + } + } + }, + "input": { + "properties": { + "count": { + "type": "long" + }, + "names": { + "type": "keyword" + } + } + }, "module": { "properties": { "count": { @@ -46,6 +89,26 @@ "type": "keyword" } } + }, + "output": { + "properties": { + "name": { + "type": "keyword" + } + } + }, + "service": { + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "version": { + "type": "keyword" + } + } } } }, diff --git a/x-pack/plugin/core/src/main/resources/security-index-template.json b/x-pack/plugin/core/src/main/resources/security-index-template.json index 778f44a93bf3a..dd17baf04740f 100644 --- a/x-pack/plugin/core/src/main/resources/security-index-template.json +++ b/x-pack/plugin/core/src/main/resources/security-index-template.json @@ -91,6 +91,41 @@ } } }, + "applications": { + "type": "object", + "properties": { + "application": { + "type": "keyword" + }, + "privileges": { + "type": "keyword" + }, + "resources": { + "type": "keyword" + } + } + }, + "application" : { + "type" : "keyword" + }, + "global": { + "type": "object", + "properties": { + "application": { + "type": "object", + "properties": { + "manage": { + "type": "object", + "properties": { + "applications": { + "type": "keyword" + } + } + } + } + } + } + }, "name" : { "type" : "keyword" }, @@ -103,6 +138,9 @@ "type" : { "type" : "keyword" }, + "actions" : { + "type" : "keyword" + }, "expiration_time" : { "type" : "date", "format" : "epoch_millis" diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseTLSTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseTLSTests.java index 588dbabb9db8a..754b398cd6c9d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseTLSTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseTLSTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; import java.net.InetAddress; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesAcknowledgementTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesAcknowledgementTests.java index 8e1c363c814da..06afd2f7ca382 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesAcknowledgementTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesAcknowledgementTests.java @@ -9,6 +9,8 @@ import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.protocol.xpack.license.LicensesStatus; +import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; import static org.elasticsearch.common.unit.TimeValue.timeValueHours; import static org.hamcrest.Matchers.equalTo; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesManagerServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesManagerServiceTests.java index 540dbd891bd9b..c397bd79e2885 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesManagerServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesManagerServiceTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.protocol.xpack.license.LicensesStatus; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; @@ -145,4 +146,4 @@ public void onFailure(Exception throwable) { } assertThat("remove license(s) failed", success.get(), equalTo(true)); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesTransportTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesTransportTests.java index a48132ef3d79d..aa372eb03562a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesTransportTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesTransportTests.java @@ -12,6 +12,8 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.protocol.xpack.license.LicensesStatus; +import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; @@ -230,4 +232,4 @@ public void testLicenseIsAcceptedWhenStartDateBeforeThanNow() throws Exception { assertThat(putLicenseResponse.isAcknowledged(), equalTo(true)); assertThat(putLicenseResponse.status(), equalTo(LicensesStatus.VALID)); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/PutLicenseResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/PutLicenseResponseTests.java deleted file mode 100644 index d4b7900fa5bc8..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/PutLicenseResponseTests.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.license; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.sameInstance; - -public class PutLicenseResponseTests extends ESTestCase { - @SuppressWarnings("unchecked") - public void testSerialization() throws Exception { - boolean acknowledged = randomBoolean(); - LicensesStatus status = randomFrom(LicensesStatus.VALID, LicensesStatus.INVALID, LicensesStatus.EXPIRED); - Map ackMessages = randomAckMessages(); - - PutLicenseResponse response = new PutLicenseResponse(acknowledged, status, "", ackMessages); - - XContentBuilder contentBuilder = XContentFactory.jsonBuilder(); - response.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); - - Map map = XContentHelper.convertToMap(BytesReference.bytes(contentBuilder), false, - contentBuilder.contentType()).v2(); - assertThat(map.containsKey("acknowledged"), equalTo(true)); - boolean actualAcknowledged = (boolean) map.get("acknowledged"); - assertThat(actualAcknowledged, equalTo(acknowledged)); - - assertThat(map.containsKey("license_status"), equalTo(true)); - String actualStatus = (String) map.get("license_status"); - assertThat(actualStatus, equalTo(status.name().toLowerCase(Locale.ROOT))); - - assertThat(map.containsKey("acknowledge"), equalTo(true)); - Map> actualAckMessages = (Map>) map.get("acknowledge"); - assertTrue(actualAckMessages.containsKey("message")); - actualAckMessages.remove("message"); - assertThat(actualAckMessages.keySet(), equalTo(ackMessages.keySet())); - for (Map.Entry> entry : actualAckMessages.entrySet()) { - assertArrayEquals(entry.getValue().toArray(), ackMessages.get(entry.getKey())); - } - } - - public void testStreamSerialization() throws IOException { - boolean acknowledged = randomBoolean(); - LicensesStatus status = randomFrom(LicensesStatus.VALID, LicensesStatus.INVALID, LicensesStatus.EXPIRED); - Map ackMessages = randomAckMessages(); - - // write the steam so that we can attempt to read it back - BytesStreamOutput output = new BytesStreamOutput(); - - PutLicenseResponse response = new PutLicenseResponse(acknowledged, status, "", ackMessages); - // write it out - response.writeTo(output); - - StreamInput input = output.bytes().streamInput(); - - // read it back in - response.readFrom(input); - - assertThat(response.isAcknowledged(), equalTo(acknowledged)); - assertThat(response.status(), equalTo(status)); - assertThat(response.acknowledgeMessages(), not(sameInstance(ackMessages))); - assertThat(response.acknowledgeMessages().size(), equalTo(ackMessages.size())); - - for (String key : ackMessages.keySet()) { - assertArrayEquals(ackMessages.get(key), response.acknowledgeMessages().get(key)); - } - } - - private static Map randomAckMessages() { - int nFeatures = randomIntBetween(1, 5); - - Map ackMessages = new HashMap<>(); - - for (int i = 0; i < nFeatures; i++) { - String feature = randomAlphaOfLengthBetween(9, 15); - int nMessages = 
randomIntBetween(1, 5); - String[] messages = new String[nMessages]; - for (int j = 0; j < nMessages; j++) { - messages[j] = randomAlphaOfLengthBetween(10, 30); - } - ackMessages.put(feature, messages); - } - - return ackMessages; - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/StartBasicLicenseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/StartBasicLicenseTests.java index fc8e25e3ccca9..20009dba41c04 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/StartBasicLicenseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/StartBasicLicenseTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.license; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; @@ -67,12 +68,14 @@ public void testStartBasicLicense() throws Exception { } RestClient restClient = getRestClient(); - Response response = restClient.performRequest("GET", "/_xpack/license/basic_status"); + Response response = restClient.performRequest(new Request("GET", "/_xpack/license/basic_status")); String body = Streams.copyToString(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8)); assertEquals(200, response.getStatusLine().getStatusCode()); assertEquals("{\"eligible_to_start_basic\":true}", body); - Response response2 = restClient.performRequest("POST", "/_xpack/license/start_basic?acknowledge=true"); + Request ackRequest = new Request("POST", "/_xpack/license/start_basic"); + ackRequest.addParameter("acknowledge", "true"); + Response response2 = restClient.performRequest(ackRequest); String body2 = Streams.copyToString(new InputStreamReader(response2.getEntity().getContent(), StandardCharsets.UTF_8)); assertEquals(200, response2.getStatusLine().getStatusCode()); assertTrue(body2.contains("\"acknowledged\":true")); @@ -86,20 +89,19 @@ public void testStartBasicLicense() throws Exception { long expirationMillis = licensingClient.prepareGetLicense().get().license().expiryDate(); assertEquals(LicenseService.BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS, expirationMillis); - Response response3 = restClient.performRequest("GET", "/_xpack/license"); + Response response3 = restClient.performRequest(new Request("GET", "/_xpack/license")); String body3 = Streams.copyToString(new InputStreamReader(response3.getEntity().getContent(), StandardCharsets.UTF_8)); assertTrue(body3.contains("\"type\" : \"basic\"")); assertFalse(body3.contains("expiry_date")); assertFalse(body3.contains("expiry_date_in_millis")); - - Response response4 = restClient.performRequest("GET", "/_xpack/license/basic_status"); + Response response4 = restClient.performRequest(new Request("GET", "/_xpack/license/basic_status")); String body4 = Streams.copyToString(new InputStreamReader(response4.getEntity().getContent(), StandardCharsets.UTF_8)); assertEquals(200, response3.getStatusLine().getStatusCode()); assertEquals("{\"eligible_to_start_basic\":false}", body4); ResponseException ex = expectThrows(ResponseException.class, - () -> restClient.performRequest("POST", "/_xpack/license/start_basic")); + () -> restClient.performRequest(new Request("POST", "/_xpack/license/start_basic"))); Response response5 = ex.getResponse(); String body5 = Streams.copyToString(new InputStreamReader(response5.getEntity().getContent(), StandardCharsets.UTF_8)); assertEquals(403, response5.getStatusLine().getStatusCode()); @@ -118,7 +120,7 @@ 
public void testUnacknowledgedStartBasicLicense() throws Exception { assertEquals("trial", getLicenseResponse.license().type()); }); - Response response2 = getRestClient().performRequest("POST", "/_xpack/license/start_basic"); + Response response2 = getRestClient().performRequest(new Request("POST", "/_xpack/license/start_basic")); String body2 = Streams.copyToString(new InputStreamReader(response2.getEntity().getContent(), StandardCharsets.UTF_8)); assertEquals(200, response2.getStatusLine().getStatusCode()); assertTrue(body2.contains("\"acknowledged\":false")); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/StartTrialLicenseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/StartTrialLicenseTests.java index 24ba4bd2bd61e..9c07965a7ec1b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/StartTrialLicenseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/StartTrialLicenseTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.license; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; @@ -54,13 +55,13 @@ public void testStartTrial() throws Exception { ensureStartingWithBasic(); RestClient restClient = getRestClient(); - Response response = restClient.performRequest("GET", "/_xpack/license/trial_status"); + Response response = restClient.performRequest(new Request("GET", "/_xpack/license/trial_status")); String body = Streams.copyToString(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8)); assertEquals(200, response.getStatusLine().getStatusCode()); assertEquals("{\"eligible_to_start_trial\":true}", body); // Test that starting will fail without acknowledgement - Response response2 = restClient.performRequest("POST", "/_xpack/license/start_trial"); + Response response2 = restClient.performRequest(new Request("POST", "/_xpack/license/start_trial")); String body2 = Streams.copyToString(new InputStreamReader(response2.getEntity().getContent(), StandardCharsets.UTF_8)); assertEquals(200, response2.getStatusLine().getStatusCode()); assertTrue(body2.contains("\"trial_was_started\":false")); @@ -74,7 +75,10 @@ public void testStartTrial() throws Exception { String type = randomFrom(LicenseService.VALID_TRIAL_TYPES); - Response response3 = restClient.performRequest("POST", "/_xpack/license/start_trial?acknowledge=true&type=" + type); + Request ackRequest = new Request("POST", "/_xpack/license/start_trial"); + ackRequest.addParameter("acknowledge", "true"); + ackRequest.addParameter("type", type); + Response response3 = restClient.performRequest(ackRequest); String body3 = Streams.copyToString(new InputStreamReader(response3.getEntity().getContent(), StandardCharsets.UTF_8)); assertEquals(200, response3.getStatusLine().getStatusCode()); assertTrue(body3.contains("\"trial_was_started\":true")); @@ -86,15 +90,17 @@ public void testStartTrial() throws Exception { assertEquals(type, postTrialLicenseResponse.license().type()); }); - Response response4 = restClient.performRequest("GET", "/_xpack/license/trial_status"); + Response response4 = restClient.performRequest(new Request("GET", "/_xpack/license/trial_status")); String body4 = Streams.copyToString(new InputStreamReader(response4.getEntity().getContent(), StandardCharsets.UTF_8)); assertEquals(200, response4.getStatusLine().getStatusCode()); assertEquals("{\"eligible_to_start_trial\":false}", body4); 
String secondAttemptType = randomFrom(LicenseService.VALID_TRIAL_TYPES); - ResponseException ex = expectThrows(ResponseException.class, - () -> restClient.performRequest("POST", "/_xpack/license/start_trial?acknowledge=true&type=" + secondAttemptType)); + Request startTrialWhenStartedRequest = new Request("POST", "/_xpack/license/start_trial"); + startTrialWhenStartedRequest.addParameter("acknowledge", "true"); + startTrialWhenStartedRequest.addParameter("type", secondAttemptType); + ResponseException ex = expectThrows(ResponseException.class, () -> restClient.performRequest(startTrialWhenStartedRequest)); Response response5 = ex.getResponse(); String body5 = Streams.copyToString(new InputStreamReader(response5.getEntity().getContent(), StandardCharsets.UTF_8)); assertEquals(403, response5.getStatusLine().getStatusCode()); @@ -105,8 +111,9 @@ public void testStartTrial() throws Exception { public void testInvalidType() throws Exception { ensureStartingWithBasic(); - ResponseException ex = expectThrows(ResponseException.class, () -> - getRestClient().performRequest("POST", "/_xpack/license/start_trial?type=basic")); + Request request = new Request("POST", "/_xpack/license/start_trial"); + request.addParameter("type", "basic"); + ResponseException ex = expectThrows(ResponseException.class, () -> getRestClient().performRequest(request)); Response response = ex.getResponse(); String body = Streams.copyToString(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8)); assertEquals(400, response.getStatusLine().getStatusCode()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java index 3b7906ae56747..afa1a8d679695 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java @@ -20,6 +20,8 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.license.licensor.LicenseSigner; +import org.elasticsearch.protocol.xpack.license.LicensesStatus; +import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; import org.hamcrest.MatcherAssert; import org.joda.time.format.DateTimeFormatter; import org.junit.Assert; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/test/TestMatchers.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/test/TestMatchers.java index 9fd1d64323eb5..2a9041575df2b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/test/TestMatchers.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/test/TestMatchers.java @@ -5,16 +5,16 @@ */ package org.elasticsearch.test; +import org.hamcrest.CustomMatcher; +import org.hamcrest.Matcher; +import org.hamcrest.Matchers; + import java.nio.file.Files; import java.nio.file.LinkOption; import java.nio.file.Path; import java.util.function.Predicate; import java.util.regex.Pattern; -import org.hamcrest.CustomMatcher; -import org.hamcrest.Matcher; -import org.hamcrest.Matchers; - public class TestMatchers extends Matchers { public static Matcher pathExists(Path path, LinkOption... 
options) {
@@ -26,6 +26,19 @@ public boolean matches(Object item) {
         };
     }
 
+    public static <T> Matcher<Predicate<T>> predicateMatches(T value) {
+        return new CustomMatcher<Predicate<T>>("Matches " + value) {
+            @Override
+            public boolean matches(Object item) {
+                if (Predicate.class.isInstance(item)) {
+                    return ((Predicate<T>) item).test(value);
+                } else {
+                    return false;
+                }
+            }
+        };
+    }
+
     public static Matcher<String> matchesPattern(String regex) {
         return matchesPattern(Pattern.compile(regex));
     }
@@ -34,16 +47,17 @@ public static Matcher<String> matchesPattern(Pattern pattern) {
         return predicate("Matches " + pattern.pattern(), String.class, pattern.asPredicate());
     }
 
-    private static <T> Matcher<T> predicate(String description, Class<T> type, Predicate<T> stringPredicate) {
+    private static <T> Matcher<T> predicate(String description, Class<T> type, Predicate<T> predicate) {
         return new CustomMatcher<T>(description) {
             @Override
             public boolean matches(Object item) {
                 if (type.isInstance(item)) {
-                    return stringPredicate.test(type.cast(item));
+                    return predicate.test(type.cast(item));
                 } else {
                     return false;
                 }
             }
         };
     }
+
 }
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java
index 4be0cefe525e6..50500f0e9399e 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java
@@ -37,7 +37,6 @@ private void deleteAllDatafeeds() throws IOException {
         final Request datafeedsRequest = new Request("GET", "/_xpack/ml/datafeeds");
         datafeedsRequest.addParameter("filter_path", "datafeeds");
         final Response datafeedsResponse = adminClient.performRequest(datafeedsRequest);
-        @SuppressWarnings("unchecked")
         final List<Map<String, Object>> datafeeds =
                 (List<Map<String, Object>>) XContentMapValues.extractValue("datafeeds", ESRestTestCase.entityAsMap(datafeedsResponse));
         if (datafeeds == null) {
@@ -45,20 +44,11 @@
         }
 
         try {
-            int statusCode = adminClient.performRequest("POST", "/_xpack/ml/datafeeds/_all/_stop")
-                    .getStatusLine().getStatusCode();
-            if (statusCode != 200) {
-                logger.error("Got status code " + statusCode + " when stopping datafeeds");
-            }
+            adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop"));
         } catch (Exception e1) {
             logger.warn("failed to stop all datafeeds.
Forcing stop", e1); try { - int statusCode = adminClient - .performRequest("POST", "/_xpack/ml/datafeeds/_all/_stop?force=true") - .getStatusLine().getStatusCode(); - if (statusCode != 200) { - logger.error("Got status code " + statusCode + " when stopping datafeeds"); - } + adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop?force=true")); } catch (Exception e2) { logger.warn("Force-closing all data feeds failed", e2); } @@ -68,10 +58,7 @@ private void deleteAllDatafeeds() throws IOException { for (Map datafeed : datafeeds) { String datafeedId = (String) datafeed.get("datafeed_id"); - int statusCode = adminClient.performRequest("DELETE", "/_xpack/ml/datafeeds/" + datafeedId).getStatusLine().getStatusCode(); - if (statusCode != 200) { - logger.error("Got status code " + statusCode + " when deleting datafeed " + datafeedId); - } + adminClient.performRequest(new Request("DELETE", "/_xpack/ml/datafeeds/" + datafeedId)); } } @@ -87,17 +74,11 @@ private void deleteAllJobs() throws IOException { } try { - int statusCode = adminClient - .performRequest("POST", "/_xpack/ml/anomaly_detectors/_all/_close") - .getStatusLine().getStatusCode(); - if (statusCode != 200) { - logger.error("Got status code " + statusCode + " when closing all jobs"); - } + adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close")); } catch (Exception e1) { logger.warn("failed to close all jobs. Forcing closed", e1); try { - adminClient.performRequest("POST", - "/_xpack/ml/anomaly_detectors/_all/_close?force=true"); + adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close?force=true")); } catch (Exception e2) { logger.warn("Force-closing all jobs failed", e2); } @@ -107,10 +88,7 @@ private void deleteAllJobs() throws IOException { for (Map jobConfig : jobConfigs) { String jobId = (String) jobConfig.get("job_id"); - int statusCode = adminClient.performRequest("DELETE", "/_xpack/ml/anomaly_detectors/" + jobId).getStatusLine().getStatusCode(); - if (statusCode != 200) { - logger.error("Got status code " + statusCode + " when deleting job " + jobId); - } + adminClient.performRequest(new Request("DELETE", "/_xpack/ml/anomaly_detectors/" + jobId)); } } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java index ef285b87cf17a..fc2ee52dc41ba 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java @@ -147,7 +147,6 @@ public void testToXContentOrdersDuplicateInputFields() throws IOException { assertEquals(Arrays.asList("yes", "no"), serialisedSpoilerFieldValues); } - @SuppressWarnings("unchecked") public void testToXContentDoesNotIncludesReservedWordInputFields() throws IOException { AnomalyRecord record = createTestInstance(); record.setByFieldName(AnomalyRecord.BUCKET_SPAN.getPreferredName()); @@ -157,7 +156,7 @@ public void testToXContentDoesNotIncludesReservedWordInputFields() throws IOExce XContentParser parser = createParser(XContentType.JSON.xContent(), bytes); Object value = parser.map().get(AnomalyRecord.BUCKET_SPAN.getPreferredName()); assertNotEquals("bar", value); - assertEquals((Long)record.getBucketSpan(), (Long)value); + assertEquals(record.getBucketSpan(), value); } public void testId() { diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java index ae171f138cf46..3eb9aa79b4bc8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.rollup; import org.apache.http.HttpStatus; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -17,7 +18,6 @@ import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -35,8 +35,9 @@ public static void clearRollupMetadata(RestClient adminClient) throws Exception private static void waitForPendingTasks(RestClient adminClient) throws Exception { ESTestCase.assertBusy(() -> { try { - Response response = adminClient.performRequest("GET", "/_cat/tasks", - Collections.singletonMap("detailed", "true")); + Request request = new Request("GET", "/_cat/tasks"); + request.addParameter("detailed", "true"); + Response response = adminClient.performRequest(request); if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { try (BufferedReader responseReader = new BufferedReader( new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) { @@ -63,9 +64,8 @@ private static void waitForPendingTasks(RestClient adminClient) throws Exception @SuppressWarnings("unchecked") private static void deleteAllJobs(RestClient adminClient) throws Exception { - Response response = adminClient.performRequest("GET", "/_xpack/rollup/job/_all"); + Response response = adminClient.performRequest(new Request("GET", "/_xpack/rollup/job/_all")); Map jobs = ESRestTestCase.entityAsMap(response); - @SuppressWarnings("unchecked") List> jobConfigs = (List>) XContentMapValues.extractValue("jobs", jobs); @@ -76,7 +76,7 @@ private static void deleteAllJobs(RestClient adminClient) throws Exception { for (Map jobConfig : jobConfigs) { String jobId = (String) ((Map) jobConfig.get("config")).get("id"); try { - response = adminClient.performRequest("DELETE", "/_xpack/rollup/job/" + jobId); + response = adminClient.performRequest(new Request("DELETE", "/_xpack/rollup/job/" + jobId)); } catch (Exception e) { // ok } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesRequestTests.java new file mode 100644 index 0000000000000..03232181f930e --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesRequestTests.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.security.action.privilege; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class DeletePrivilegesRequestTests extends ESTestCase { + + public void testSerialization() throws IOException { + final DeletePrivilegesRequest original = new DeletePrivilegesRequest( + randomAlphaOfLengthBetween(3, 8), generateRandomStringArray(5, randomIntBetween(3, 8), false, false)); + original.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); + + final BytesStreamOutput output = new BytesStreamOutput(); + original.writeTo(output); + output.flush(); + final DeletePrivilegesRequest copy = new DeletePrivilegesRequest(); + copy.readFrom(output.bytes().streamInput()); + assertThat(copy.application(), equalTo(original.application())); + assertThat(copy.privileges(), equalTo(original.privileges())); + assertThat(copy.getRefreshPolicy(), equalTo(original.getRefreshPolicy())); + } + + public void testValidation() { + assertValidationFailure(new DeletePrivilegesRequest(null, null), "application name", "privileges"); + assertValidationFailure(new DeletePrivilegesRequest("", null), "application name", "privileges"); + assertValidationFailure(new DeletePrivilegesRequest(null, new String[0]), "application name", "privileges"); + assertValidationFailure(new DeletePrivilegesRequest("", new String[0]), "application name", "privileges"); + assertValidationFailure(new DeletePrivilegesRequest(null, new String[]{"all"}), "application name"); + assertValidationFailure(new DeletePrivilegesRequest("", new String[]{"all"}), "application name"); + assertValidationFailure(new DeletePrivilegesRequest("app", null), "privileges"); + assertValidationFailure(new DeletePrivilegesRequest("app", new String[0]), "privileges"); + assertValidationFailure(new DeletePrivilegesRequest("app", new String[]{""}), "privileges"); + + assertThat(new DeletePrivilegesRequest("app", new String[]{"all"}).validate(), nullValue()); + assertThat(new DeletePrivilegesRequest("app", new String[]{"all", "some"}).validate(), nullValue()); + } + + private void assertValidationFailure(DeletePrivilegesRequest request, String... messages) { + final ActionRequestValidationException exception = request.validate(); + assertThat(exception, notNullValue()); + for (String message : messages) { + assertThat(exception.validationErrors(), Matchers.hasItem(containsString(message))); + } + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesResponseTests.java new file mode 100644 index 0000000000000..d490177c0cec4 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/DeletePrivilegesResponseTests.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.security.action.privilege; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Arrays; + +import static org.hamcrest.Matchers.equalTo; + +public class DeletePrivilegesResponseTests extends ESTestCase { + + public void testSerialization() throws IOException { + final DeletePrivilegesResponse original = new DeletePrivilegesResponse( + Arrays.asList(generateRandomStringArray(5, randomIntBetween(3, 8), false, true))); + + final BytesStreamOutput output = new BytesStreamOutput(); + original.writeTo(output); + output.flush(); + final DeletePrivilegesResponse copy = new DeletePrivilegesResponse(); + copy.readFrom(output.bytes().streamInput()); + assertThat(copy.found(), equalTo(original.found())); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequestTests.java new file mode 100644 index 0000000000000..4d67b82dfd4c0 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesRequestTests.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.security.action.privilege; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class GetPrivilegesRequestTests extends ESTestCase { + + public void testSerialization() throws IOException { + final GetPrivilegesRequest original = new GetPrivilegesRequest(); + if (randomBoolean()) { + original.application(randomAlphaOfLengthBetween(3, 8)); + } + original.privileges(generateRandomStringArray(3, 5, false, true)); + + final BytesStreamOutput out = new BytesStreamOutput(); + original.writeTo(out); + + final GetPrivilegesRequest copy = new GetPrivilegesRequest(); + copy.readFrom(out.bytes().streamInput()); + + assertThat(original.application(), Matchers.equalTo(copy.application())); + assertThat(original.privileges(), Matchers.equalTo(copy.privileges())); + } + + public void testValidation() { + assertThat(request(null).validate(), nullValue()); + assertThat(request(null, "all").validate(), nullValue()); + assertThat(request(null, "read", "write").validate(), nullValue()); + assertThat(request("my_app").validate(), nullValue()); + assertThat(request("my_app", "all").validate(), nullValue()); + assertThat(request("my_app", "read", "write").validate(), nullValue()); + final ActionRequestValidationException exception = request("my_app", ((String[]) null)).validate(); + assertThat(exception, notNullValue()); + assertThat(exception.validationErrors(), containsInAnyOrder("privileges cannot be null")); + } + + private GetPrivilegesRequest request(String application, 
String... privileges) { + final GetPrivilegesRequest request = new GetPrivilegesRequest(); + request.application(application); + request.privileges(privileges); + return request; + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponseTests.java new file mode 100644 index 0000000000000..50c247967a780 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/GetPrivilegesResponseTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.security.action.privilege; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.util.Collections; +import java.util.Locale; + +public class GetPrivilegesResponseTests extends ESTestCase { + + public void testSerialization() throws IOException { + ApplicationPrivilegeDescriptor[] privileges = randomArray(6, ApplicationPrivilegeDescriptor[]::new, () -> + new ApplicationPrivilegeDescriptor( + randomAlphaOfLengthBetween(3, 8).toLowerCase(Locale.ROOT), + randomAlphaOfLengthBetween(3, 8).toLowerCase(Locale.ROOT), + Sets.newHashSet(randomArray(3, String[]::new, () -> randomAlphaOfLength(3).toLowerCase(Locale.ROOT) + "/*")), + Collections.emptyMap() + ) + ); + final GetPrivilegesResponse original = new GetPrivilegesResponse(privileges); + + final BytesStreamOutput out = new BytesStreamOutput(); + original.writeTo(out); + + final GetPrivilegesResponse copy = new GetPrivilegesResponse(); + copy.readFrom(out.bytes().streamInput()); + + assertThat(copy.privileges(), Matchers.equalTo(original.privileges())); + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestTests.java new file mode 100644 index 0000000000000..e258efd04c5ec --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestTests.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.security.action.privilege; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.Locale; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.notNullValue; + +public class PutPrivilegesRequestTests extends ESTestCase { + + public void testSerialization() throws IOException { + final PutPrivilegesRequest original = request(randomArray(8, ApplicationPrivilegeDescriptor[]::new, + () -> new ApplicationPrivilegeDescriptor( + randomAlphaOfLengthBetween(3, 8).toLowerCase(Locale.ROOT), + randomAlphaOfLengthBetween(3, 8).toLowerCase(Locale.ROOT), + Sets.newHashSet(randomArray(3, String[]::new, () -> randomAlphaOfLength(3).toLowerCase(Locale.ROOT) + "/*")), + Collections.emptyMap() + ) + )); + original.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); + + final BytesStreamOutput out = new BytesStreamOutput(); + original.writeTo(out); + + final PutPrivilegesRequest copy = new PutPrivilegesRequest(); + copy.readFrom(out.bytes().streamInput()); + + assertThat(original.getPrivileges(), Matchers.equalTo(copy.getPrivileges())); + assertThat(original.getRefreshPolicy(), Matchers.equalTo(copy.getRefreshPolicy())); + } + + public void testValidation() { + // wildcard app name + final ApplicationPrivilegeDescriptor wildcardApp = descriptor("*", "all", "*"); + assertValidationFailure(request(wildcardApp), "Application names may not contain"); + + // invalid priv names + final ApplicationPrivilegeDescriptor spaceName = descriptor("app", "r e a d", "read/*"); + final ApplicationPrivilegeDescriptor numericName = descriptor("app", "7346", "read/*"); + assertValidationFailure(request(spaceName), "Application privilege names must match"); + assertValidationFailure(request(numericName), "Application privilege names must match"); + + // no actions + final ApplicationPrivilegeDescriptor nothing = descriptor("*", "nothing"); + assertValidationFailure(request(nothing), "Application privileges must have at least one action"); + + // reserved metadata + final ApplicationPrivilegeDescriptor reservedMetadata = new ApplicationPrivilegeDescriptor("app", "all", + Collections.emptySet(), Collections.singletonMap("_notAllowed", true) + ); + assertValidationFailure(request(reservedMetadata), "metadata keys may not start"); + + ApplicationPrivilegeDescriptor badAction = descriptor("app", "foo", randomFrom("data.read", "data_read", "data+read", "read")); + assertValidationFailure(request(badAction), "must contain one of"); + + // mixed + assertValidationFailure(request(wildcardApp, numericName, reservedMetadata, badAction), + "Application names may not contain", "Application privilege names must match", "metadata keys may not start", + "must contain one of"); + } + + private ApplicationPrivilegeDescriptor descriptor(String application, String name, String... 
actions) { + return new ApplicationPrivilegeDescriptor(application, name, Sets.newHashSet(actions), Collections.emptyMap()); + } + + private void assertValidationFailure(PutPrivilegesRequest request, String... messages) { + final ActionRequestValidationException exception = request.validate(); + assertThat(exception, notNullValue()); + for (String message : messages) { + assertThat(exception.validationErrors(), hasItem(containsString(message))); + } + } + + private PutPrivilegesRequest request(ApplicationPrivilegeDescriptor... privileges) { + final PutPrivilegesRequest original = new PutPrivilegesRequest(); + + original.setPrivileges(Arrays.asList(privileges)); + return original; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesResponseTests.java new file mode 100644 index 0000000000000..431d7f326ee88 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesResponseTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.security.action.privilege; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class PutPrivilegesResponseTests extends ESTestCase { + + public void testSerialization() throws IOException { + final int applicationCount = randomInt(3); + final Map> map = new HashMap<>(applicationCount); + for (int i = 0; i < applicationCount; i++) { + map.put(randomAlphaOfLengthBetween(3, 8), + Arrays.asList(generateRandomStringArray(5, 6, false, true)) + ); + } + final PutPrivilegesResponse original = new PutPrivilegesResponse(map); + + final BytesStreamOutput output = new BytesStreamOutput(); + original.writeTo(output); + output.flush(); + final PutPrivilegesResponse copy = new PutPrivilegesResponse(); + copy.readFrom(output.bytes().streamInput()); + assertThat(copy.created(), equalTo(original.created())); + assertThat(Strings.toString(copy), equalTo(Strings.toString(original))); + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java new file mode 100644 index 0000000000000..ae458cbb2f5ed --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java @@ -0,0 +1,164 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.security.action.role; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.ByteBufferStreamInput; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.xpack.core.XPackClientPlugin; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.iterableWithSize; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class PutRoleRequestTests extends ESTestCase { + + public void testValidationOfApplicationPrivileges() { + assertSuccessfulValidation(buildRequestWithApplicationPrivilege("app", new String[]{"read"}, new String[]{"*"})); + assertSuccessfulValidation(buildRequestWithApplicationPrivilege("app", new String[]{"action:login"}, new String[]{"/"})); + assertSuccessfulValidation(buildRequestWithApplicationPrivilege("*", new String[]{"data/read:user"}, new String[]{"user/123"})); + + // Fail + assertValidationError("privilege names and actions must match the pattern", + buildRequestWithApplicationPrivilege("app", new String[]{"in valid"}, new String[]{"*"})); + assertValidationError("An application name prefix must match the pattern", + buildRequestWithApplicationPrivilege("000", new String[]{"all"}, new String[]{"*"})); + assertValidationError("An application name prefix must match the pattern", + buildRequestWithApplicationPrivilege("%*", new String[]{"all"}, new String[]{"*"})); + } + + public void testSerialization() throws IOException { + final PutRoleRequest original = buildRandomRequest(); + + final BytesStreamOutput out = new BytesStreamOutput(); + original.writeTo(out); + + final PutRoleRequest copy = new PutRoleRequest(); + final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin(Settings.EMPTY).getNamedWriteables()); + StreamInput in = new NamedWriteableAwareStreamInput(ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), registry); + copy.readFrom(in); + + assertThat(copy.roleDescriptor(), equalTo(original.roleDescriptor())); + } + + public void testSerializationV63AndBefore() throws IOException { + final PutRoleRequest original = buildRandomRequest(); + + final BytesStreamOutput out = new BytesStreamOutput(); + final Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_6_0, Version.V_6_3_2); + out.setVersion(version); + 
original.writeTo(out);
+
+        final PutRoleRequest copy = new PutRoleRequest();
+        final StreamInput in = out.bytes().streamInput();
+        in.setVersion(version);
+        copy.readFrom(in);
+
+        assertThat(copy.name(), equalTo(original.name()));
+        assertThat(copy.cluster(), equalTo(original.cluster()));
+        assertThat(copy.indices(), equalTo(original.indices()));
+        assertThat(copy.runAs(), equalTo(original.runAs()));
+        assertThat(copy.metadata(), equalTo(original.metadata()));
+        assertThat(copy.getRefreshPolicy(), equalTo(original.getRefreshPolicy()));
+
+        assertThat(copy.applicationPrivileges(), iterableWithSize(0));
+        assertThat(copy.conditionalClusterPrivileges(), arrayWithSize(0));
+    }
+
+    private void assertSuccessfulValidation(PutRoleRequest request) {
+        final ActionRequestValidationException exception = request.validate();
+        assertThat(exception, nullValue());
+    }
+
+    private void assertValidationError(String message, PutRoleRequest request) {
+        final ActionRequestValidationException exception = request.validate();
+        assertThat(exception, notNullValue());
+        assertThat(exception.validationErrors(), hasItem(containsString(message)));
+    }
+
+    private PutRoleRequest buildRequestWithApplicationPrivilege(String appName, String[] privileges, String[] resources) {
+        final PutRoleRequest request = new PutRoleRequest();
+        request.name("test");
+        final ApplicationResourcePrivileges privilege = ApplicationResourcePrivileges.builder()
+            .application(appName)
+            .privileges(privileges)
+            .resources(resources)
+            .build();
+        request.addApplicationPrivileges(new ApplicationResourcePrivileges[]{privilege});
+        return request;
+    }
+
+    private PutRoleRequest buildRandomRequest() {
+        final PutRoleRequest request = new PutRoleRequest();
+        request.name(randomAlphaOfLengthBetween(4, 9));
+
+        request.cluster(randomSubsetOf(Arrays.asList("monitor", "manage", "all", "manage_security", "manage_ml", "monitor_watcher"))
+            .toArray(Strings.EMPTY_ARRAY));
+
+        for (int i = randomIntBetween(0, 4); i > 0; i--) {
+            request.addIndex(
+                generateRandomStringArray(randomIntBetween(1, 3), randomIntBetween(3, 8), false, false),
+                randomSubsetOf(randomIntBetween(1, 2), "read", "write", "index", "all").toArray(Strings.EMPTY_ARRAY),
+                generateRandomStringArray(randomIntBetween(1, 3), randomIntBetween(3, 8), true),
+                generateRandomStringArray(randomIntBetween(1, 3), randomIntBetween(3, 8), true),
+                null
+            );
+        }
+
+        final Supplier<String> stringWithInitialLowercase = ()
+            -> randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(3, 12);
+        final ApplicationResourcePrivileges[] applicationPrivileges = new ApplicationResourcePrivileges[randomIntBetween(0, 5)];
+        for (int i = 0; i < applicationPrivileges.length; i++) {
+            applicationPrivileges[i] = ApplicationResourcePrivileges.builder()
+                .application(stringWithInitialLowercase.get())
+                .privileges(randomArray(1, 3, String[]::new, stringWithInitialLowercase))
+                .resources(generateRandomStringArray(5, randomIntBetween(3, 8), false, false))
+                .build();
+        }
+        request.addApplicationPrivileges(applicationPrivileges);
+
+        if (randomBoolean()) {
+            final String[] appNames = randomArray(1, 4, String[]::new, stringWithInitialLowercase);
+            request.conditionalCluster(new ConditionalClusterPrivileges.ManageApplicationPrivileges(Sets.newHashSet(appNames)));
+        }
+
+        request.runAs(generateRandomStringArray(4, 3, false, true));
+
+        final Map<String, Object> metadata = new HashMap<>();
+        for (String key : generateRandomStringArray(3, 5, false, true)) {
+            metadata.put(key, randomFrom(Boolean.TRUE, Boolean.FALSE, 1, 2,
randomAlphaOfLengthBetween(2, 9))); + } + request.metadata(metadata); + + request.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); + return request; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequestTests.java new file mode 100644 index 0000000000000..f458311e68537 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesRequestTests.java @@ -0,0 +1,125 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.security.action.user; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class HasPrivilegesRequestTests extends ESTestCase { + + public void testSerializationV7() throws IOException { + final HasPrivilegesRequest original = randomRequest(); + final HasPrivilegesRequest copy = serializeAndDeserialize(original, Version.V_7_0_0_alpha1); + + assertThat(copy.username(), equalTo(original.username())); + assertThat(copy.clusterPrivileges(), equalTo(original.clusterPrivileges())); + assertThat(copy.indexPrivileges(), equalTo(original.indexPrivileges())); + assertThat(copy.applicationPrivileges(), equalTo(original.applicationPrivileges())); + } + + public void testSerializationV63() throws IOException { + final HasPrivilegesRequest original = randomRequest(); + final HasPrivilegesRequest copy = serializeAndDeserialize(original, Version.V_6_3_0); + + assertThat(copy.username(), equalTo(original.username())); + assertThat(copy.clusterPrivileges(), equalTo(original.clusterPrivileges())); + assertThat(copy.indexPrivileges(), equalTo(original.indexPrivileges())); + assertThat(copy.applicationPrivileges(), nullValue()); + } + + public void testValidateNullPrivileges() { + final HasPrivilegesRequest request = new HasPrivilegesRequest(); + final ActionRequestValidationException exception = request.validate(); + assertThat(exception, notNullValue()); + assertThat(exception.validationErrors(), hasItem("clusterPrivileges must not be null")); + assertThat(exception.validationErrors(), hasItem("indexPrivileges must not be null")); + assertThat(exception.validationErrors(), hasItem("applicationPrivileges must not be null")); + } + + public void testValidateEmptyPrivileges() { + final HasPrivilegesRequest request = new HasPrivilegesRequest(); + request.clusterPrivileges(new String[0]); + 
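        // Empty arrays are non-null, but validation still fails because no
+        // privilege is actually being requested.
+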
request.indexPrivileges(new IndicesPrivileges[0]);
+        request.applicationPrivileges(new ApplicationResourcePrivileges[0]);
+        final ActionRequestValidationException exception = request.validate();
+        assertThat(exception, notNullValue());
+        assertThat(exception.validationErrors(), hasItem("must specify at least one privilege"));
+    }
+
+    public void testValidateNoWildcardApplicationPrivileges() {
+        final HasPrivilegesRequest request = new HasPrivilegesRequest();
+        request.clusterPrivileges(new String[0]);
+        request.indexPrivileges(new IndicesPrivileges[0]);
+        request.applicationPrivileges(new ApplicationResourcePrivileges[] {
+            ApplicationResourcePrivileges.builder().privileges("read").application("*").resources("item/1").build()
+        });
+        final ActionRequestValidationException exception = request.validate();
+        assertThat(exception, notNullValue());
+        assertThat(exception.validationErrors(), hasItem("Application names may not contain '*' (found '*')"));
+    }
+
+    private HasPrivilegesRequest serializeAndDeserialize(HasPrivilegesRequest original, Version version) throws IOException {
+        final BytesStreamOutput out = new BytesStreamOutput();
+        out.setVersion(version);
+        original.writeTo(out);
+
+        final HasPrivilegesRequest copy = new HasPrivilegesRequest();
+        final StreamInput in = out.bytes().streamInput();
+        in.setVersion(version);
+        copy.readFrom(in);
+        assertThat(in.read(), equalTo(-1));
+        return copy;
+    }
+
+    private HasPrivilegesRequest randomRequest() {
+        final HasPrivilegesRequest request = new HasPrivilegesRequest();
+        request.username(randomAlphaOfLength(8));
+
+        final List<String> clusterPrivileges = randomSubsetOf(Arrays.asList(ClusterPrivilege.MONITOR, ClusterPrivilege.MANAGE,
+            ClusterPrivilege.MANAGE_ML, ClusterPrivilege.MANAGE_SECURITY, ClusterPrivilege.MANAGE_PIPELINE, ClusterPrivilege.ALL))
+            .stream().flatMap(p -> p.name().stream()).collect(Collectors.toList());
+        request.clusterPrivileges(clusterPrivileges.toArray(Strings.EMPTY_ARRAY));
+
+        IndicesPrivileges[] indicesPrivileges = new IndicesPrivileges[randomInt(5)];
+        for (int i = 0; i < indicesPrivileges.length; i++) {
+            indicesPrivileges[i] = IndicesPrivileges.builder()
+                .privileges(randomFrom("read", "write", "create", "delete", "all"))
+                .indices(randomAlphaOfLengthBetween(2, 8) + (randomBoolean() ? "*" : ""))
+                .build();
+        }
+        request.indexPrivileges(indicesPrivileges);
+
+        final ApplicationResourcePrivileges[] appPrivileges = new ApplicationResourcePrivileges[randomInt(5)];
+        for (int i = 0; i < appPrivileges.length; i++) {
+            appPrivileges[i] = ApplicationResourcePrivileges.builder()
+                .application(randomAlphaOfLengthBetween(3, 8))
+                .resources(randomAlphaOfLengthBetween(5, 7) + (randomBoolean() ? "*" : ""))
+                .privileges(generateRandomStringArray(6, 7, false, false))
+                .build();
+        }
+        request.applicationPrivileges(appPrivileges);
+        return request;
+    }
+
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandlerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandlerTests.java
new file mode 100644
index 0000000000000..2598461c37280
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandlerTests.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.core.security.authc;
+
+import org.elasticsearch.ElasticsearchSecurityException;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.XPackField;
+import org.mockito.Mockito;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.sameInstance;
+
+public class DefaultAuthenticationFailureHandlerTests extends ESTestCase {
+
+    public void testAuthenticationRequired() {
+        final boolean testDefault = randomBoolean();
+        final String basicAuthScheme = "Basic realm=\"" + XPackField.SECURITY + "\" charset=\"UTF-8\"";
+        final String bearerAuthScheme = "Bearer realm=\"" + XPackField.SECURITY + "\"";
+        final DefaultAuthenticationFailureHandler failureHandler;
+        if (testDefault) {
+            failureHandler = new DefaultAuthenticationFailureHandler();
+        } else {
+            final Map<String, List<String>> failureResponseHeaders = new HashMap<>();
+            failureResponseHeaders.put("WWW-Authenticate", Arrays.asList(basicAuthScheme, bearerAuthScheme));
+            failureHandler = new DefaultAuthenticationFailureHandler(failureResponseHeaders);
+        }
+        assertThat(failureHandler, is(notNullValue()));
+        final ElasticsearchSecurityException ese =
+            failureHandler.authenticationRequired("someaction", new ThreadContext(Settings.builder().build()));
+        assertThat(ese, is(notNullValue()));
+        assertThat(ese.getMessage(), equalTo("action [someaction] requires authentication"));
+        assertThat(ese.getHeader("WWW-Authenticate"), is(notNullValue()));
+        if (testDefault) {
+            assertWWWAuthenticateWithSchemes(ese, basicAuthScheme);
+        } else {
+            assertWWWAuthenticateWithSchemes(ese, basicAuthScheme, bearerAuthScheme);
+        }
+    }
+
+    public void testExceptionProcessingRequest() {
+        final String basicAuthScheme = "Basic realm=\"" + XPackField.SECURITY + "\" charset=\"UTF-8\"";
+        final String bearerAuthScheme = "Bearer realm=\"" + XPackField.SECURITY + "\"";
+        final String negotiateAuthScheme = randomFrom("Negotiate", "Negotiate Ijoijksdk");
+        final Map<String, List<String>> failureResponseHeaders = new HashMap<>();
+        failureResponseHeaders.put("WWW-Authenticate", Arrays.asList(basicAuthScheme, bearerAuthScheme, negotiateAuthScheme));
+        final DefaultAuthenticationFailureHandler failureHandler = new DefaultAuthenticationFailureHandler(failureResponseHeaders);
+
+        assertThat(failureHandler, is(notNullValue()));
+        final boolean causeIsElasticsearchSecurityException = randomBoolean();
+        final boolean causeIsEseAndUnauthorized = causeIsElasticsearchSecurityException && randomBoolean();
+        final ElasticsearchSecurityException eseCause = (causeIsEseAndUnauthorized)
+            ? new ElasticsearchSecurityException("unauthorized", RestStatus.UNAUTHORIZED, null, (Object[]) null)
+            : new ElasticsearchSecurityException("different error", RestStatus.BAD_REQUEST, null, (Object[]) null);
+        final Exception cause = causeIsElasticsearchSecurityException ? eseCause : new Exception("other error");
+        final boolean withAuthenticateHeader = randomBoolean();
+        final String selectedScheme = randomFrom(bearerAuthScheme, basicAuthScheme, negotiateAuthScheme);
+        if (withAuthenticateHeader) {
+            eseCause.addHeader("WWW-Authenticate", Collections.singletonList(selectedScheme));
+        }
+
+        if (causeIsElasticsearchSecurityException) {
+            if (causeIsEseAndUnauthorized) {
+                final ElasticsearchSecurityException ese = failureHandler.exceptionProcessingRequest(Mockito.mock(RestRequest.class), cause,
+                    new ThreadContext(Settings.builder().build()));
+                assertThat(ese, is(notNullValue()));
+                assertThat(ese.getHeader("WWW-Authenticate"), is(notNullValue()));
+                assertThat(ese, is(sameInstance(cause)));
+                if (withAuthenticateHeader == false) {
+                    assertWWWAuthenticateWithSchemes(ese, basicAuthScheme, bearerAuthScheme, negotiateAuthScheme);
+                } else {
+                    if (selectedScheme.contains("Negotiate ")) {
+                        assertWWWAuthenticateWithSchemes(ese, selectedScheme);
+                    } else {
+                        assertWWWAuthenticateWithSchemes(ese, basicAuthScheme, bearerAuthScheme, negotiateAuthScheme);
+                    }
+                }
+                assertThat(ese.getMessage(), equalTo("unauthorized"));
+            } else {
+                expectThrows(AssertionError.class, () -> failureHandler.exceptionProcessingRequest(Mockito.mock(RestRequest.class), cause,
+                    new ThreadContext(Settings.builder().build())));
+            }
+        } else {
+            final ElasticsearchSecurityException ese = failureHandler.exceptionProcessingRequest(Mockito.mock(RestRequest.class), cause,
+                new ThreadContext(Settings.builder().build()));
+            assertThat(ese, is(notNullValue()));
+            assertThat(ese.getHeader("WWW-Authenticate"), is(notNullValue()));
+            assertThat(ese.getMessage(), equalTo("error attempting to authenticate request"));
+            assertWWWAuthenticateWithSchemes(ese, basicAuthScheme, bearerAuthScheme, negotiateAuthScheme);
+        }
+    }
+
+    private void assertWWWAuthenticateWithSchemes(final ElasticsearchSecurityException ese, final String... schemes) {
+        assertThat(ese.getHeader("WWW-Authenticate").size(), is(schemes.length));
+        assertThat(ese.getHeader("WWW-Authenticate"), contains(schemes));
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermissionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermissionTests.java
new file mode 100644
index 0000000000000..47a189b41f12d
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermissionTests.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.security.authz.permission;
+
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.util.set.Sets;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege;
+import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import static java.util.Collections.singletonList;
+import static org.hamcrest.Matchers.equalTo;
+
+public class ApplicationPermissionTests extends ESTestCase {
+
+    private List<ApplicationPrivilegeDescriptor> store = new ArrayList<>();
+
+    private ApplicationPrivilege app1All = storePrivilege("app1", "all", "*");
+    private ApplicationPrivilege app1Empty = storePrivilege("app1", "empty");
+    private ApplicationPrivilege app1Read = storePrivilege("app1", "read", "read/*");
+    private ApplicationPrivilege app1Write = storePrivilege("app1", "write", "write/*");
+    private ApplicationPrivilege app1Delete = storePrivilege("app1", "delete", "write/delete");
+    private ApplicationPrivilege app1Create = storePrivilege("app1", "create", "write/create");
+    private ApplicationPrivilege app2Read = storePrivilege("app2", "read", "read/*");
+
+    private ApplicationPrivilege storePrivilege(String app, String name, String... patterns) {
+        store.add(new ApplicationPrivilegeDescriptor(app, name, Sets.newHashSet(patterns), Collections.emptyMap()));
+        return new ApplicationPrivilege(app, name, patterns);
+    }
+
+    public void testCheckSimplePermission() {
+        final ApplicationPermission hasPermission = buildPermission(app1Write, "*");
+        assertThat(hasPermission.grants(app1Write, "*"), equalTo(true));
+        assertThat(hasPermission.grants(app1Write, "foo"), equalTo(true));
+        assertThat(hasPermission.grants(app1Delete, "*"), equalTo(true));
+        assertThat(hasPermission.grants(app1Create, "foo"), equalTo(true));
+
+        assertThat(hasPermission.grants(app1Read, "*"), equalTo(false));
+        assertThat(hasPermission.grants(app1Read, "foo"), equalTo(false));
+        assertThat(hasPermission.grants(app1All, "*"), equalTo(false));
+        assertThat(hasPermission.grants(app1All, "foo"), equalTo(false));
+    }
+
+    public void testNonePermission() {
+        final ApplicationPermission hasPermission = buildPermission(ApplicationPrivilege.NONE.apply("app1"), "*");
+        for (ApplicationPrivilege privilege : Arrays.asList(app1All, app1Empty, app1Create, app1Delete, app1Read, app1Write, app2Read)) {
+            assertThat("Privilege " + privilege + " on *", hasPermission.grants(privilege, "*"), equalTo(false));
+            final String resource = randomAlphaOfLengthBetween(1, 6);
+            assertThat("Privilege " + privilege + " on " + resource, hasPermission.grants(privilege, resource), equalTo(false));
+        }
+    }
+
+    public void testResourceMatching() {
+        final ApplicationPermission hasPermission = buildPermission(app1All, "dashboard/*", "audit/*", "user/12345");
+
+        assertThat(hasPermission.grants(app1Write, "*"), equalTo(false));
+        assertThat(hasPermission.grants(app1Write, "dashboard"), equalTo(false));
+        assertThat(hasPermission.grants(app1Write, "dashboard/999"), equalTo(true));
+
+        assertThat(hasPermission.grants(app1Create, "audit/2018-02-21"), equalTo(true));
+        assertThat(hasPermission.grants(app1Create, "report/2018-02-21"), equalTo(false));
+
+        assertThat(hasPermission.grants(app1Read, "user/12345"), equalTo(true));
+        assertThat(hasPermission.grants(app1Read, "user/67890"), equalTo(false));
+
+
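        // Even the "all" privilege is constrained by the resources it was granted on.
+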
assertThat(hasPermission.grants(app1All, "dashboard/999"), equalTo(true)); + assertThat(hasPermission.grants(app1All, "audit/2018-02-21"), equalTo(true)); + assertThat(hasPermission.grants(app1All, "user/12345"), equalTo(true)); + } + + public void testActionMatching() { + final ApplicationPermission hasPermission = buildPermission(app1Write, "allow/*"); + + final ApplicationPrivilege update = actionPrivilege("app1", "write/update"); + assertThat(hasPermission.grants(update, "allow/1"), equalTo(true)); + assertThat(hasPermission.grants(update, "deny/1"), equalTo(false)); + + final ApplicationPrivilege updateCreate = actionPrivilege("app1", "write/update", "write/create"); + assertThat(hasPermission.grants(updateCreate, "allow/1"), equalTo(true)); + assertThat(hasPermission.grants(updateCreate, "deny/1"), equalTo(false)); + + final ApplicationPrivilege manage = actionPrivilege("app1", "admin/manage"); + assertThat(hasPermission.grants(manage, "allow/1"), equalTo(false)); + assertThat(hasPermission.grants(manage, "deny/1"), equalTo(false)); + } + + public void testDoesNotMatchAcrossApplications() { + assertThat(buildPermission(app1Read, "*").grants(app1Read, "123"), equalTo(true)); + assertThat(buildPermission(app1All, "*").grants(app1Read, "123"), equalTo(true)); + + assertThat(buildPermission(app1Read, "*").grants(app2Read, "123"), equalTo(false)); + assertThat(buildPermission(app1All, "*").grants(app2Read, "123"), equalTo(false)); + } + + public void testMergedPermissionChecking() { + final ApplicationPrivilege app1ReadWrite = ApplicationPrivilege.get("app1", Sets.union(app1Read.name(), app1Write.name()), store); + final ApplicationPermission hasPermission = buildPermission(app1ReadWrite, "allow/*"); + + assertThat(hasPermission.grants(app1Read, "allow/1"), equalTo(true)); + assertThat(hasPermission.grants(app1Write, "allow/1"), equalTo(true)); + + assertThat(hasPermission.grants(app1Read, "deny/1"), equalTo(false)); + assertThat(hasPermission.grants(app1Write, "deny/1"), equalTo(false)); + + assertThat(hasPermission.grants(app1All, "allow/1"), equalTo(false)); + assertThat(hasPermission.grants(app2Read, "allow/1"), equalTo(false)); + } + + private ApplicationPrivilege actionPrivilege(String appName, String... actions) { + return ApplicationPrivilege.get(appName, Sets.newHashSet(actions), Collections.emptyList()); + } + + private ApplicationPermission buildPermission(ApplicationPrivilege privilege, String... resources) { + return new ApplicationPermission(singletonList(new Tuple<>(privilege, Sets.newHashSet(resources)))); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeDescriptorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeDescriptorTests.java new file mode 100644 index 0000000000000..9db998bd2d1d2 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeDescriptorTests.java @@ -0,0 +1,143 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+package org.elasticsearch.xpack.core.security.authz.privilege;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.util.set.Sets;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.EqualsHashCodeTestUtils;
+import org.hamcrest.Matchers;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+
+import static org.elasticsearch.common.xcontent.DeprecationHandler.THROW_UNSUPPORTED_OPERATION;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.iterableWithSize;
+
+public class ApplicationPrivilegeDescriptorTests extends ESTestCase {
+
+    public void testEqualsAndHashCode() {
+        final ApplicationPrivilegeDescriptor privilege = randomPrivilege();
+        final EqualsHashCodeTestUtils.MutateFunction<ApplicationPrivilegeDescriptor> mutate = randomFrom(
+            orig -> new ApplicationPrivilegeDescriptor(
+                "x" + orig.getApplication(), orig.getName(), orig.getActions(), orig.getMetadata()),
+            orig -> new ApplicationPrivilegeDescriptor(
+                orig.getApplication(), "x" + orig.getName(), orig.getActions(), orig.getMetadata()),
+            orig -> new ApplicationPrivilegeDescriptor(
+                orig.getApplication(), orig.getName(), Collections.singleton("*"), orig.getMetadata()),
+            orig -> new ApplicationPrivilegeDescriptor(
+                orig.getApplication(), orig.getName(), orig.getActions(), Collections.singletonMap("mutate", -1L))
+        );
+        EqualsHashCodeTestUtils.checkEqualsAndHashCode(privilege,
+            original -> new ApplicationPrivilegeDescriptor(
+                original.getApplication(), original.getName(), original.getActions(), original.getMetadata()),
+            mutate
+        );
+    }
+
+    public void testSerialization() throws IOException {
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
+            final ApplicationPrivilegeDescriptor original = randomPrivilege();
+            original.writeTo(out);
+            final ApplicationPrivilegeDescriptor clone = new ApplicationPrivilegeDescriptor(out.bytes().streamInput());
+            assertThat(clone, Matchers.equalTo(original));
+            assertThat(original, Matchers.equalTo(clone));
+        }
+    }
+
+    public void testXContentGenerationAndParsing() throws IOException {
+        final boolean includeTypeField = randomBoolean();
+
+        final XContent xContent = randomFrom(XContentType.values()).xContent();
+        try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
+            final XContentBuilder builder = new XContentBuilder(xContent, out);
+
+            final ApplicationPrivilegeDescriptor original = randomPrivilege();
+            if (includeTypeField) {
+                original.toXContent(builder, true);
+            } else if (randomBoolean()) {
+                original.toXContent(builder, false);
+            } else {
+                original.toXContent(builder, ToXContent.EMPTY_PARAMS);
+            }
+            builder.flush();
+
+            final byte[] bytes = out.toByteArray();
+            try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, THROW_UNSUPPORTED_OPERATION, bytes)) {
+                final ApplicationPrivilegeDescriptor clone = ApplicationPrivilegeDescriptor.parse(parser,
+                    randomBoolean() ? randomAlphaOfLength(3) : null,
+                    randomBoolean() ? randomAlphaOfLength(3) : null,
+                    includeTypeField);
+                assertThat(clone, Matchers.equalTo(original));
+                assertThat(original, Matchers.equalTo(clone));
+            }
+        }
+    }
+
+    public void testParseXContentWithDefaultNames() throws IOException {
+        final String json = "{ \"actions\": [ \"data:read\" ], \"metadata\" : { \"num\": 1, \"bool\":false } }";
+        final XContent xContent = XContentType.JSON.xContent();
+        try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, THROW_UNSUPPORTED_OPERATION, json)) {
+            final ApplicationPrivilegeDescriptor privilege = ApplicationPrivilegeDescriptor.parse(parser, "my_app", "read", false);
+            assertThat(privilege.getApplication(), equalTo("my_app"));
+            assertThat(privilege.getName(), equalTo("read"));
+            assertThat(privilege.getActions(), contains("data:read"));
+            assertThat(privilege.getMetadata().entrySet(), iterableWithSize(2));
+            assertThat(privilege.getMetadata().get("num"), equalTo(1));
+            assertThat(privilege.getMetadata().get("bool"), equalTo(false));
+        }
+    }
+
+    public void testParseXContentWithoutUsingDefaultNames() throws IOException {
+        final String json = "{" +
+            "  \"application\": \"your_app\"," +
+            "  \"name\": \"write\"," +
+            "  \"actions\": [ \"data:write\" ]" +
+            "}";
+        final XContent xContent = XContentType.JSON.xContent();
+        try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, THROW_UNSUPPORTED_OPERATION, json)) {
+            final ApplicationPrivilegeDescriptor privilege = ApplicationPrivilegeDescriptor.parse(parser, "my_app", "read", false);
+            assertThat(privilege.getApplication(), equalTo("your_app"));
+            assertThat(privilege.getName(), equalTo("write"));
+            assertThat(privilege.getActions(), contains("data:write"));
+            assertThat(privilege.getMetadata().entrySet(), iterableWithSize(0));
+        }
+    }
+
+    private ApplicationPrivilegeDescriptor randomPrivilege() {
+        final String applicationName;
+        if (randomBoolean()) {
+            applicationName = "*";
+        } else {
+            applicationName = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(2, 10);
+        }
+        final String privilegeName = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(2, 8);
+        final String[] patterns = new String[randomIntBetween(0, 5)];
+        for (int i = 0; i < patterns.length; i++) {
+            final String suffix = randomBoolean() ? "*" : randomAlphaOfLengthBetween(4, 9);
+            patterns[i] = randomAlphaOfLengthBetween(2, 5) + "/" + suffix;
+        }
+
+        final Map<String, Object> metadata = new HashMap<>();
+        for (int i = randomInt(3); i > 0; i--) {
+            metadata.put(randomAlphaOfLengthBetween(2, 5), randomFrom(randomBoolean(), randomInt(10), randomAlphaOfLength(5)));
+        }
+        return new ApplicationPrivilegeDescriptor(applicationName, privilegeName, Sets.newHashSet(patterns), metadata);
+    }
+
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeTests.java
new file mode 100644
index 0000000000000..c65f06f05f957
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeTests.java
@@ -0,0 +1,202 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.security.authz.privilege;
+
+import junit.framework.AssertionFailedError;
+import org.apache.lucene.util.automaton.CharacterRunAutomaton;
+import org.elasticsearch.common.util.set.Sets;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.EqualsHashCodeTestUtils;
+import org.junit.Assert;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Locale;
+import java.util.Set;
+import java.util.function.Supplier;
+
+import static org.elasticsearch.common.Strings.collectionToCommaDelimitedString;
+import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+
+public class ApplicationPrivilegeTests extends ESTestCase {
+
+    public void testValidationOfApplicationName() {
+        final String specialCharacters = ":;$#%()+='.{}[]!@^&'";
+        final Supplier<Character> specialCharacter = () -> specialCharacters.charAt(randomInt(specialCharacters.length() - 1));
+
+        assertValidationFailure("a p p", "application name", () -> ApplicationPrivilege.validateApplicationName("a p p"));
+        assertValidationFailure("ap", "application name", () -> ApplicationPrivilege.validateApplicationName("ap"));
+        for (String app : Arrays.asList(
+            "App", // must start with lowercase
+            "1app", // must start with letter
+            "app" + specialCharacter.get() // cannot contain special characters unless preceded by a "-" or "_"
+        )) {
+            assertValidationFailure(app, "application name", () -> ApplicationPrivilege.validateApplicationName(app));
+            assertValidationFailure(app, "application name", () -> ApplicationPrivilege.validateApplicationNameOrWildcard(app));
+        }
+
+        // no wildcards
+        assertValidationFailure("app*", "application names", () -> ApplicationPrivilege.validateApplicationName("app*"));
+        // no special characters with wildcards
+        final String appNameWithSpecialCharAndWildcard = "app" + specialCharacter.get() + "*";
+        assertValidationFailure(appNameWithSpecialCharAndWildcard, "application name",
+            () -> ApplicationPrivilege.validateApplicationNameOrWildcard(appNameWithSpecialCharAndWildcard));
+
+        String appNameWithSpecialChars = "myapp" + randomFrom('-', '_');
+        for (int i = randomIntBetween(1, 12); i > 0; i--) {
+            appNameWithSpecialChars = appNameWithSpecialChars + specialCharacter.get();
+        }
+        // these should all be OK
+        for (String app : Arrays.asList("app", "app1", "myApp", "myApp-:;$#%()+='.", "myApp_:;$#%()+='.", appNameWithSpecialChars)) {
+            assertNoException(app, () -> ApplicationPrivilege.validateApplicationName(app));
+            assertNoException(app, () -> ApplicationPrivilege.validateApplicationNameOrWildcard(app));
+        }
+    }
+
+    public void testValidationOfPrivilegeName() {
+        // must start with lowercase
+        assertValidationFailure("Read", "privilege names", () -> ApplicationPrivilege.validatePrivilegeName("Read"));
+        // must start with letter
+        assertValidationFailure("1read", "privilege names", () -> ApplicationPrivilege.validatePrivilegeName("1read"));
+        // cannot contain special characters
+        final String specialChars = ":;$#%()+=/',";
+        final String withSpecialChar = "read" + specialChars.charAt(randomInt(specialChars.length() - 1));
+        assertValidationFailure(withSpecialChar, "privilege names", () -> ApplicationPrivilege.validatePrivilegeName(withSpecialChar));
+
+        // these should all be OK
+        for (String priv : Arrays.asList("read", "read1", "readData", "read-data", "read.data", "read_data")) {
+            assertNoException(priv, () -> ApplicationPrivilege.validatePrivilegeName(priv));
+            assertNoException(priv, () -> ApplicationPrivilege.validatePrivilegeOrActionName(priv));
+        }
+
+        for (String priv : Arrays.asList("r e a d", "read\n", "copy®")) {
+            assertValidationFailure(priv, "privilege names and action", () -> ApplicationPrivilege.validatePrivilegeOrActionName(priv));
+        }
+
+        for (String priv : Arrays.asList("read:*", "read/*", "read/a_b.c-d+e%f#(g)")) {
+            assertNoException(priv, () -> ApplicationPrivilege.validatePrivilegeOrActionName(priv));
+        }
+    }
+
+    public void testNonePrivilege() {
+        final ApplicationPrivilege none = ApplicationPrivilege.NONE.apply("super-mega-app");
+        CharacterRunAutomaton run = new CharacterRunAutomaton(none.getAutomaton());
+        for (int i = randomIntBetween(5, 10); i > 0; i--) {
+            final String action;
+            if (randomBoolean()) {
+                action = randomAlphaOfLengthBetween(3, 12);
+            } else {
+                action = randomAlphaOfLengthBetween(3, 6) + randomFrom(":", "/") + randomAlphaOfLengthBetween(3, 8);
+            }
+            assertFalse("NONE should not grant " + action, run.run(action));
+        }
+    }
+
+    public void testGetPrivilegeByName() {
+        final ApplicationPrivilegeDescriptor myRead = descriptor("my-app", "read", "data:read/*", "action:login");
+        final ApplicationPrivilegeDescriptor myWrite = descriptor("my-app", "write", "data:write/*", "action:login");
+        final ApplicationPrivilegeDescriptor myAdmin = descriptor("my-app", "admin", "data:read/*", "action:*");
+        final ApplicationPrivilegeDescriptor yourRead = descriptor("your-app", "read", "data:read/*", "action:login");
+        final Set<ApplicationPrivilegeDescriptor> stored = Sets.newHashSet(myRead, myWrite, myAdmin, yourRead);
+
+        assertEqual(ApplicationPrivilege.get("my-app", Collections.singleton("read"), stored), myRead);
+        assertEqual(ApplicationPrivilege.get("my-app", Collections.singleton("write"), stored), myWrite);
+
+        final ApplicationPrivilege readWrite = ApplicationPrivilege.get("my-app", Sets.newHashSet("read", "write"), stored);
+        assertThat(readWrite.getApplication(), equalTo("my-app"));
+        assertThat(readWrite.name(), containsInAnyOrder("read", "write"));
+        assertThat(readWrite.getPatterns(), arrayContainingInAnyOrder("data:read/*", "data:write/*", "action:login"));
+
+        CharacterRunAutomaton run = new CharacterRunAutomaton(readWrite.getAutomaton());
+        for (String action : Arrays.asList("data:read/settings", "data:write/user/kimchy", "action:login")) {
+            assertTrue(run.run(action));
+        }
+        for (String action : Arrays.asList("data:delete/user/kimchy", "action:shutdown")) {
+            assertFalse(run.run(action));
+        }
+    }
+
+    private void assertEqual(ApplicationPrivilege actual, ApplicationPrivilegeDescriptor expected) {
+        assertThat(actual.getApplication(), equalTo(expected.getApplication()));
+        assertThat(getPrivilegeName(actual), equalTo(expected.getName()));
+        assertThat(Sets.newHashSet(actual.getPatterns()), equalTo(expected.getActions()));
+    }
+
+    private ApplicationPrivilegeDescriptor descriptor(String application, String name, String... actions) {
+        return new ApplicationPrivilegeDescriptor(application, name, Sets.newHashSet(actions), Collections.emptyMap());
+    }
+
+    public void testEqualsAndHashCode() {
+        final ApplicationPrivilege privilege = randomPrivilege();
+        final EqualsHashCodeTestUtils.MutateFunction<ApplicationPrivilege> mutate = randomFrom(
+            orig -> createPrivilege("x" + orig.getApplication(), getPrivilegeName(orig), orig.getPatterns()),
+            orig -> createPrivilege(orig.getApplication(), "x" + getPrivilegeName(orig), orig.getPatterns()),
+            orig -> new ApplicationPrivilege(orig.getApplication(), getPrivilegeName(orig), "*")
+        );
+        EqualsHashCodeTestUtils.checkEqualsAndHashCode(privilege,
+            original -> createPrivilege(original.getApplication(), getPrivilegeName(original), original.getPatterns()),
+            mutate
+        );
+    }
+
+    private ApplicationPrivilege createPrivilege(String applicationName, String privilegeName, String... patterns) {
+        return new ApplicationPrivilege(applicationName, privilegeName, patterns);
+    }
+
+    private String getPrivilegeName(ApplicationPrivilege privilege) {
+        if (privilege.name.size() == 1) {
+            return privilege.name.iterator().next();
+        } else {
+            throw new IllegalStateException(privilege + " has a multivariate name: " + collectionToCommaDelimitedString(privilege.name));
+        }
+    }
+
+    private void assertValidationFailure(String reason, String messageContent, ThrowingRunnable body) {
+        final IllegalArgumentException exception;
+        try {
+            exception = expectThrows(IllegalArgumentException.class, body);
+            assertThat(exception.getMessage().toLowerCase(Locale.ROOT), containsString(messageContent.toLowerCase(Locale.ROOT)));
+        } catch (AssertionFailedError e) {
+            fail(reason + " - " + e.getMessage());
+        }
+    }
+
+    private void assertNoException(String reason, ThrowingRunnable body) {
+        try {
+            body.run();
+            // pass
+        } catch (Throwable e) {
+            Assert.fail(reason + " - Expected no exception, but got: " + e);
+        }
+    }
+
+    private ApplicationPrivilege randomPrivilege() {
+        final String applicationName;
+        if (randomBoolean()) {
+            applicationName = "*";
+        } else {
+            applicationName = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(2, 10);
+        }
+        final String privilegeName = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(2, 8);
+        final String[] patterns = new String[randomIntBetween(0, 5)];
+        for (int i = 0; i < patterns.length; i++) {
+            final String suffix = randomBoolean() ? "*" : randomAlphaOfLengthBetween(4, 9);
+            patterns[i] = randomAlphaOfLengthBetween(2, 5) + "/" + suffix;
+        }
+        return createPrivilege(applicationName, privilegeName, patterns);
+    }
+
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivilegesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivilegesTests.java
new file mode 100644
index 0000000000000..ebcd70869cb02
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConditionalClusterPrivilegesTests.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.core.security.authz.privilege;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.XPackClientPlugin;
+
+import java.io.ByteArrayOutputStream;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.elasticsearch.common.xcontent.DeprecationHandler.THROW_UNSUPPORTED_OPERATION;
+import static org.hamcrest.Matchers.equalTo;
+
+public class ConditionalClusterPrivilegesTests extends ESTestCase {
+
+    public void testSerialization() throws Exception {
+        final ConditionalClusterPrivilege[] original = buildSecurityPrivileges();
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
+            ConditionalClusterPrivileges.writeArray(out, original);
+            final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin(Settings.EMPTY).getNamedWriteables());
+            try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry)) {
+                final ConditionalClusterPrivilege[] copy = ConditionalClusterPrivileges.readArray(in);
+                assertThat(copy, equalTo(original));
+                assertThat(original, equalTo(copy));
+            }
+        }
+    }
+
+    public void testGenerateAndParseXContent() throws Exception {
+        final XContent xContent = randomFrom(XContentType.values()).xContent();
+        try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
+            final XContentBuilder builder = new XContentBuilder(xContent, out);
+
+            final List<ConditionalClusterPrivilege> original = Arrays.asList(buildSecurityPrivileges());
+            ConditionalClusterPrivileges.toXContent(builder, ToXContent.EMPTY_PARAMS, original);
+            builder.flush();
+
+            final byte[] bytes = out.toByteArray();
+            try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, THROW_UNSUPPORTED_OPERATION, bytes)) {
+                assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
+                final List<ConditionalClusterPrivilege> clone = ConditionalClusterPrivileges.parse(parser);
+                assertThat(clone, equalTo(original));
+                assertThat(original, equalTo(clone));
+            }
+        }
+    }
+
+    private ConditionalClusterPrivilege[] buildSecurityPrivileges() {
+        return buildSecurityPrivileges(randomIntBetween(4, 7));
+    }
+
+    private ConditionalClusterPrivilege[] buildSecurityPrivileges(int applicationNameLength) {
+        return new ConditionalClusterPrivilege[] {
+            ManageApplicationPrivilegesTests.buildPrivileges(applicationNameLength)
+        };
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java
new file mode 100644
index 0000000000000..a5c1bbc98d1ba
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java
@@ -0,0 +1,156 @@
+/*
+ * Copyright Elasticsearch B.V.
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.security.authz.privilege; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xpack.core.XPackClientPlugin; +import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesAction; +import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesAction; +import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction; +import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; +import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; +import org.elasticsearch.xpack.core.security.action.user.GetUsersAction; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges.ManageApplicationPrivileges; + +import java.io.ByteArrayOutputStream; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Locale; +import java.util.Set; +import java.util.function.Predicate; + +import static org.elasticsearch.common.xcontent.DeprecationHandler.THROW_UNSUPPORTED_OPERATION; +import static org.elasticsearch.test.TestMatchers.predicateMatches; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; + +public class ManageApplicationPrivilegesTests extends ESTestCase { + + public void testSerialization() throws Exception { + final ManageApplicationPrivileges original = buildPrivileges(); + try (BytesStreamOutput out = new BytesStreamOutput()) { + original.writeTo(out); + final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin(Settings.EMPTY).getNamedWriteables()); + try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry)) { + final ManageApplicationPrivileges copy = ManageApplicationPrivileges.createFrom(in); + assertThat(copy, equalTo(original)); + assertThat(original, equalTo(copy)); + } + } + } + + public void testGenerateAndParseXContent() throws Exception { + final XContent xContent = 
randomFrom(XContentType.values()).xContent();
+        try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
+            final XContentBuilder builder = new XContentBuilder(xContent, out);
+
+            final ManageApplicationPrivileges original = buildPrivileges();
+            builder.startObject();
+            original.toXContent(builder, ToXContent.EMPTY_PARAMS);
+            builder.endObject();
+            builder.flush();
+
+            final byte[] bytes = out.toByteArray();
+            try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, THROW_UNSUPPORTED_OPERATION, bytes)) {
+                assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
+                // ManageApplicationPrivileges.parse requires that the parser be positioned on the "manage" field.
+                assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
+                final ManageApplicationPrivileges clone = ManageApplicationPrivileges.parse(parser);
+                assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT));
+
+                assertThat(clone, equalTo(original));
+                assertThat(original, equalTo(clone));
+            }
+        }
+    }
+
+    public void testEqualsAndHashCode() {
+        final int applicationNameLength = randomIntBetween(4, 7);
+        final ManageApplicationPrivileges privileges = buildPrivileges(applicationNameLength);
+        final EqualsHashCodeTestUtils.MutateFunction<ManageApplicationPrivileges> mutate
+            = orig -> buildPrivileges(applicationNameLength + randomIntBetween(1, 3));
+        EqualsHashCodeTestUtils.checkEqualsAndHashCode(privileges, this::clone, mutate);
+    }
+
+    public void testPrivilege() {
+        final ManageApplicationPrivileges privileges = buildPrivileges();
+        assertThat(privileges.getPrivilege(), instanceOf(ClusterPrivilege.class));
+        for (String actionName : Arrays.asList(GetPrivilegesAction.NAME, PutPrivilegesAction.NAME, DeletePrivilegesAction.NAME)) {
+            assertThat(privileges.getPrivilege().predicate(), predicateMatches(actionName));
+        }
+        for (String actionName : Arrays.asList(GetUsersAction.NAME, PutRoleAction.NAME, DeleteRoleMappingAction.NAME,
+            HasPrivilegesAction.NAME)) {
+            assertThat(privileges.getPrivilege().predicate(), not(predicateMatches(actionName)));
+        }
+    }
+
+    public void testRequestPredicate() {
+        final ManageApplicationPrivileges kibanaAndLogstash = new ManageApplicationPrivileges(Sets.newHashSet("kibana-*", "logstash"));
+        final ManageApplicationPrivileges cloudAndSwiftype = new ManageApplicationPrivileges(Sets.newHashSet("cloud-*", "swiftype"));
+        final Predicate<TransportRequest> kibanaAndLogstashPredicate = kibanaAndLogstash.getRequestPredicate();
+        final Predicate<TransportRequest> cloudAndSwiftypePredicate = cloudAndSwiftype.getRequestPredicate();
+        assertThat(kibanaAndLogstashPredicate, notNullValue());
+        assertThat(cloudAndSwiftypePredicate, notNullValue());
+
+        final GetPrivilegesRequest getKibana1 = new GetPrivilegesRequest();
+        getKibana1.application("kibana-1");
+        assertThat(kibanaAndLogstashPredicate, predicateMatches(getKibana1));
+        assertThat(cloudAndSwiftypePredicate, not(predicateMatches(getKibana1)));
+
+        final DeletePrivilegesRequest deleteLogstash = new DeletePrivilegesRequest("logstash", new String[]{"all"});
+        assertThat(kibanaAndLogstashPredicate, predicateMatches(deleteLogstash));
+        assertThat(cloudAndSwiftypePredicate, not(predicateMatches(deleteLogstash)));
+
+        final PutPrivilegesRequest putKibana = new PutPrivilegesRequest();
+
+        final List<ApplicationPrivilegeDescriptor> kibanaPrivileges = new ArrayList<>();
+        for (int i = randomIntBetween(2, 6); i > 0; i--) {
+            kibanaPrivileges.add(new ApplicationPrivilegeDescriptor("kibana-" + i,
+                randomAlphaOfLengthBetween(3, 6).toLowerCase(Locale.ROOT), Collections.emptySet(), Collections.emptyMap()));
+        }
+        putKibana.setPrivileges(kibanaPrivileges);
+        assertThat(kibanaAndLogstashPredicate, predicateMatches(putKibana));
+        assertThat(cloudAndSwiftypePredicate, not(predicateMatches(putKibana)));
+    }
+
+    private ManageApplicationPrivileges clone(ManageApplicationPrivileges original) {
+        return new ManageApplicationPrivileges(new LinkedHashSet<>(original.getApplicationNames()));
+    }
+
+    private ManageApplicationPrivileges buildPrivileges() {
+        return buildPrivileges(randomIntBetween(4, 7));
+    }
+
+    static ManageApplicationPrivileges buildPrivileges(int applicationNameLength) {
+        Set<String> applicationNames = Sets.newHashSet(Arrays.asList(generateRandomStringArray(5, applicationNameLength, false, false)));
+        return new ManageApplicationPrivileges(applicationNames);
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
index 85d2bc16dd06a..9cb5e25c5b8d1 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
@@ -35,6 +35,7 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.transport.TransportRequest;
 import org.elasticsearch.xpack.core.ml.MlMetaIndex;
 import org.elasticsearch.xpack.core.ml.action.CloseJobAction;
 import org.elasticsearch.xpack.core.ml.action.DeleteDatafeedAction;
@@ -75,6 +76,12 @@
 import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields;
 import org.elasticsearch.xpack.core.ml.notifications.AuditorField;
 import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkAction;
+import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesAction;
+import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesRequest;
+import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesAction;
+import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesRequest;
+import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction;
+import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesRequest;
 import org.elasticsearch.xpack.core.security.action.role.PutRoleAction;
 import org.elasticsearch.xpack.core.security.action.saml.SamlAuthenticateAction;
 import org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationAction;
@@ -85,6 +92,8 @@
 import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl.IndexAccessControl;
 import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache;
 import org.elasticsearch.xpack.core.security.authz.permission.Role;
+import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege;
+import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor;
 import org.elasticsearch.xpack.core.security.user.BeatsSystemUser;
 import org.elasticsearch.xpack.core.security.user.LogstashSystemUser;
 import org.elasticsearch.xpack.core.security.user.SystemUser;
@@ -104,10 +113,12 @@
 import org.joda.time.DateTimeZone;
 
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.Map;
 
 import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; /** * Unit tests for the {@link ReservedRolesStore} @@ -139,21 +150,23 @@ public void testIsReserved() { } public void testIngestAdminRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("ingest_admin"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role ingestAdminRole = Role.builder(roleDescriptor, null).build(); - assertThat(ingestAdminRole.cluster().check(PutIndexTemplateAction.NAME), is(true)); - assertThat(ingestAdminRole.cluster().check(GetIndexTemplatesAction.NAME), is(true)); - assertThat(ingestAdminRole.cluster().check(DeleteIndexTemplateAction.NAME), is(true)); - assertThat(ingestAdminRole.cluster().check(PutPipelineAction.NAME), is(true)); - assertThat(ingestAdminRole.cluster().check(GetPipelineAction.NAME), is(true)); - assertThat(ingestAdminRole.cluster().check(DeletePipelineAction.NAME), is(true)); + assertThat(ingestAdminRole.cluster().check(PutIndexTemplateAction.NAME, request), is(true)); + assertThat(ingestAdminRole.cluster().check(GetIndexTemplatesAction.NAME, request), is(true)); + assertThat(ingestAdminRole.cluster().check(DeleteIndexTemplateAction.NAME, request), is(true)); + assertThat(ingestAdminRole.cluster().check(PutPipelineAction.NAME, request), is(true)); + assertThat(ingestAdminRole.cluster().check(GetPipelineAction.NAME, request), is(true)); + assertThat(ingestAdminRole.cluster().check(DeletePipelineAction.NAME, request), is(true)); - assertThat(ingestAdminRole.cluster().check(ClusterRerouteAction.NAME), is(false)); - assertThat(ingestAdminRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); - assertThat(ingestAdminRole.cluster().check(MonitoringBulkAction.NAME), is(false)); + assertThat(ingestAdminRole.cluster().check(ClusterRerouteAction.NAME, request), is(false)); + assertThat(ingestAdminRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(false)); + assertThat(ingestAdminRole.cluster().check(MonitoringBulkAction.NAME, request), is(false)); assertThat(ingestAdminRole.indices().allowedIndicesMatcher(IndexAction.NAME).test("foo"), is(false)); assertThat(ingestAdminRole.indices().allowedIndicesMatcher("indices:foo").test(randomAlphaOfLengthBetween(8, 24)), @@ -163,25 +176,49 @@ public void testIngestAdminRole() { } public void testKibanaSystemRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("kibana_system"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role kibanaRole = Role.builder(roleDescriptor, null).build(); - assertThat(kibanaRole.cluster().check(ClusterHealthAction.NAME), is(true)); - assertThat(kibanaRole.cluster().check(ClusterStateAction.NAME), is(true)); - assertThat(kibanaRole.cluster().check(ClusterStatsAction.NAME), is(true)); - assertThat(kibanaRole.cluster().check(PutIndexTemplateAction.NAME), is(true)); - assertThat(kibanaRole.cluster().check(GetIndexTemplatesAction.NAME), is(true)); - assertThat(kibanaRole.cluster().check(ClusterRerouteAction.NAME), is(false)); - assertThat(kibanaRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); - assertThat(kibanaRole.cluster().check(MonitoringBulkAction.NAME), is(true)); + assertThat(kibanaRole.cluster().check(ClusterHealthAction.NAME, request), is(true)); + 
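        // kibana_system may read cluster details and manage index templates,
+        // but must not be able to change cluster-wide settings or routing.
+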
assertThat(kibanaRole.cluster().check(ClusterStateAction.NAME, request), is(true)); + assertThat(kibanaRole.cluster().check(ClusterStatsAction.NAME, request), is(true)); + assertThat(kibanaRole.cluster().check(PutIndexTemplateAction.NAME, request), is(true)); + assertThat(kibanaRole.cluster().check(GetIndexTemplatesAction.NAME, request), is(true)); + assertThat(kibanaRole.cluster().check(ClusterRerouteAction.NAME, request), is(false)); + assertThat(kibanaRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(false)); + assertThat(kibanaRole.cluster().check(MonitoringBulkAction.NAME, request), is(true)); // SAML - assertThat(kibanaRole.cluster().check(SamlPrepareAuthenticationAction.NAME), is(true)); - assertThat(kibanaRole.cluster().check(SamlAuthenticateAction.NAME), is(true)); - assertThat(kibanaRole.cluster().check(InvalidateTokenAction.NAME), is(true)); - assertThat(kibanaRole.cluster().check(CreateTokenAction.NAME), is(false)); + assertThat(kibanaRole.cluster().check(SamlPrepareAuthenticationAction.NAME, request), is(true)); + assertThat(kibanaRole.cluster().check(SamlAuthenticateAction.NAME, request), is(true)); + assertThat(kibanaRole.cluster().check(InvalidateTokenAction.NAME, request), is(true)); + assertThat(kibanaRole.cluster().check(CreateTokenAction.NAME, request), is(false)); + + // Application Privileges + DeletePrivilegesRequest deleteKibanaPrivileges = new DeletePrivilegesRequest("kibana-.kibana", new String[]{ "all", "read" }); + DeletePrivilegesRequest deleteLogstashPrivileges = new DeletePrivilegesRequest("logstash", new String[]{ "all", "read" }); + assertThat(kibanaRole.cluster().check(DeletePrivilegesAction.NAME, deleteKibanaPrivileges), is(true)); + assertThat(kibanaRole.cluster().check(DeletePrivilegesAction.NAME, deleteLogstashPrivileges), is(false)); + + GetPrivilegesRequest getKibanaPrivileges = new GetPrivilegesRequest(); + getKibanaPrivileges.application("kibana-.kibana-sales"); + GetPrivilegesRequest getApmPrivileges = new GetPrivilegesRequest(); + getApmPrivileges.application("apm"); + assertThat(kibanaRole.cluster().check(GetPrivilegesAction.NAME, getKibanaPrivileges), is(true)); + assertThat(kibanaRole.cluster().check(GetPrivilegesAction.NAME, getApmPrivileges), is(false)); + + PutPrivilegesRequest putKibanaPrivileges = new PutPrivilegesRequest(); + putKibanaPrivileges.setPrivileges(Collections.singletonList(new ApplicationPrivilegeDescriptor( + "kibana-.kibana-" + randomAlphaOfLengthBetween(2,6), "all", Collections.emptySet(), Collections.emptyMap()))); + PutPrivilegesRequest putSwiftypePrivileges = new PutPrivilegesRequest(); + putSwiftypePrivileges.setPrivileges(Collections.singletonList(new ApplicationPrivilegeDescriptor( + "swiftype-kibana" , "all", Collections.emptySet(), Collections.emptyMap()))); + assertThat(kibanaRole.cluster().check(PutPrivilegesAction.NAME, putKibanaPrivileges), is(true)); + assertThat(kibanaRole.cluster().check(PutPrivilegesAction.NAME, putSwiftypePrivileges), is(false)); // Everything else assertThat(kibanaRole.runAs().check(randomAlphaOfLengthBetween(1, 12)), is(false)); @@ -238,18 +275,20 @@ public void testKibanaSystemRole() { } public void testKibanaUserRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("kibana_user"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role kibanaUserRole = Role.builder(roleDescriptor, null).build(); - 
assertThat(kibanaUserRole.cluster().check(ClusterHealthAction.NAME), is(false)); - assertThat(kibanaUserRole.cluster().check(ClusterStateAction.NAME), is(false)); - assertThat(kibanaUserRole.cluster().check(ClusterStatsAction.NAME), is(false)); - assertThat(kibanaUserRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); - assertThat(kibanaUserRole.cluster().check(ClusterRerouteAction.NAME), is(false)); - assertThat(kibanaUserRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); - assertThat(kibanaUserRole.cluster().check(MonitoringBulkAction.NAME), is(false)); + assertThat(kibanaUserRole.cluster().check(ClusterHealthAction.NAME, request), is(false)); + assertThat(kibanaUserRole.cluster().check(ClusterStateAction.NAME, request), is(false)); + assertThat(kibanaUserRole.cluster().check(ClusterStatsAction.NAME, request), is(false)); + assertThat(kibanaUserRole.cluster().check(PutIndexTemplateAction.NAME, request), is(false)); + assertThat(kibanaUserRole.cluster().check(ClusterRerouteAction.NAME, request), is(false)); + assertThat(kibanaUserRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(false)); + assertThat(kibanaUserRole.cluster().check(MonitoringBulkAction.NAME, request), is(false)); assertThat(kibanaUserRole.runAs().check(randomAlphaOfLengthBetween(1, 12)), is(false)); @@ -271,22 +310,35 @@ public void testKibanaUserRole() { assertThat(kibanaUserRole.indices().allowedIndicesMatcher(MultiSearchAction.NAME).test(index), is(true)); assertThat(kibanaUserRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(true)); }); + + final String randomApplication = "kibana-" + randomAlphaOfLengthBetween(8, 24); + assertThat(kibanaUserRole.application().grants(new ApplicationPrivilege(randomApplication, "app-random", "all"), "*"), is(false)); + + final String application = "kibana-.kibana"; + assertThat(kibanaUserRole.application().grants(new ApplicationPrivilege(application, "app-foo", "foo"), "*"), is(false)); + assertThat(kibanaUserRole.application().grants(new ApplicationPrivilege(application, "app-all", "all"), "*"), is(true)); + + final String applicationWithRandomIndex = "kibana-.kibana_" + randomAlphaOfLengthBetween(8, 24); + assertThat(kibanaUserRole.application().grants(new ApplicationPrivilege(applicationWithRandomIndex, "app-random-index", "all"), + "*"), is(false)); } public void testMonitoringUserRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("monitoring_user"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role monitoringUserRole = Role.builder(roleDescriptor, null).build(); - assertThat(monitoringUserRole.cluster().check(MainAction.NAME), is(true)); - assertThat(monitoringUserRole.cluster().check(ClusterHealthAction.NAME), is(false)); - assertThat(monitoringUserRole.cluster().check(ClusterStateAction.NAME), is(false)); - assertThat(monitoringUserRole.cluster().check(ClusterStatsAction.NAME), is(false)); - assertThat(monitoringUserRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); - assertThat(monitoringUserRole.cluster().check(ClusterRerouteAction.NAME), is(false)); - assertThat(monitoringUserRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); - assertThat(monitoringUserRole.cluster().check(MonitoringBulkAction.NAME), is(false)); + assertThat(monitoringUserRole.cluster().check(MainAction.NAME, request), is(true)); + 
assertThat(monitoringUserRole.cluster().check(ClusterHealthAction.NAME, request), is(false)); + assertThat(monitoringUserRole.cluster().check(ClusterStateAction.NAME, request), is(false)); + assertThat(monitoringUserRole.cluster().check(ClusterStatsAction.NAME, request), is(false)); + assertThat(monitoringUserRole.cluster().check(PutIndexTemplateAction.NAME, request), is(false)); + assertThat(monitoringUserRole.cluster().check(ClusterRerouteAction.NAME, request), is(false)); + assertThat(monitoringUserRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(false)); + assertThat(monitoringUserRole.cluster().check(MonitoringBulkAction.NAME, request), is(false)); assertThat(monitoringUserRole.runAs().check(randomAlphaOfLengthBetween(1, 12)), is(false)); @@ -313,27 +365,29 @@ public void testMonitoringUserRole() { } public void testRemoteMonitoringAgentRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("remote_monitoring_agent"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role remoteMonitoringAgentRole = Role.builder(roleDescriptor, null).build(); - assertThat(remoteMonitoringAgentRole.cluster().check(ClusterHealthAction.NAME), is(true)); - assertThat(remoteMonitoringAgentRole.cluster().check(ClusterStateAction.NAME), is(true)); - assertThat(remoteMonitoringAgentRole.cluster().check(ClusterStatsAction.NAME), is(true)); - assertThat(remoteMonitoringAgentRole.cluster().check(PutIndexTemplateAction.NAME), is(true)); - assertThat(remoteMonitoringAgentRole.cluster().check(ClusterRerouteAction.NAME), is(false)); - assertThat(remoteMonitoringAgentRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); - assertThat(remoteMonitoringAgentRole.cluster().check(MonitoringBulkAction.NAME), is(false)); - assertThat(remoteMonitoringAgentRole.cluster().check(GetWatchAction.NAME), is(true)); - assertThat(remoteMonitoringAgentRole.cluster().check(PutWatchAction.NAME), is(true)); - assertThat(remoteMonitoringAgentRole.cluster().check(DeleteWatchAction.NAME), is(true)); - assertThat(remoteMonitoringAgentRole.cluster().check(ExecuteWatchAction.NAME), is(false)); - assertThat(remoteMonitoringAgentRole.cluster().check(AckWatchAction.NAME), is(false)); - assertThat(remoteMonitoringAgentRole.cluster().check(ActivateWatchAction.NAME), is(false)); - assertThat(remoteMonitoringAgentRole.cluster().check(WatcherServiceAction.NAME), is(false)); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterHealthAction.NAME, request), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterStateAction.NAME, request), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterStatsAction.NAME, request), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(PutIndexTemplateAction.NAME, request), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterRerouteAction.NAME, request), is(false)); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(false)); + assertThat(remoteMonitoringAgentRole.cluster().check(MonitoringBulkAction.NAME, request), is(false)); + assertThat(remoteMonitoringAgentRole.cluster().check(GetWatchAction.NAME, request), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(PutWatchAction.NAME, request), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(DeleteWatchAction.NAME, request), is(true)); + 
assertThat(remoteMonitoringAgentRole.cluster().check(ExecuteWatchAction.NAME, request), is(false)); + assertThat(remoteMonitoringAgentRole.cluster().check(AckWatchAction.NAME, request), is(false)); + assertThat(remoteMonitoringAgentRole.cluster().check(ActivateWatchAction.NAME, request), is(false)); + assertThat(remoteMonitoringAgentRole.cluster().check(WatcherServiceAction.NAME, request), is(false)); // we get this from the cluster:monitor privilege - assertThat(remoteMonitoringAgentRole.cluster().check(WatcherStatsAction.NAME), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(WatcherStatsAction.NAME, request), is(true)); assertThat(remoteMonitoringAgentRole.runAs().check(randomAlphaOfLengthBetween(1, 12)), is(false)); @@ -357,18 +411,20 @@ public void testRemoteMonitoringAgentRole() { } public void testReportingUserRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("reporting_user"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role reportingUserRole = Role.builder(roleDescriptor, null).build(); - assertThat(reportingUserRole.cluster().check(ClusterHealthAction.NAME), is(false)); - assertThat(reportingUserRole.cluster().check(ClusterStateAction.NAME), is(false)); - assertThat(reportingUserRole.cluster().check(ClusterStatsAction.NAME), is(false)); - assertThat(reportingUserRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); - assertThat(reportingUserRole.cluster().check(ClusterRerouteAction.NAME), is(false)); - assertThat(reportingUserRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); - assertThat(reportingUserRole.cluster().check(MonitoringBulkAction.NAME), is(false)); + assertThat(reportingUserRole.cluster().check(ClusterHealthAction.NAME, request), is(false)); + assertThat(reportingUserRole.cluster().check(ClusterStateAction.NAME, request), is(false)); + assertThat(reportingUserRole.cluster().check(ClusterStatsAction.NAME, request), is(false)); + assertThat(reportingUserRole.cluster().check(PutIndexTemplateAction.NAME, request), is(false)); + assertThat(reportingUserRole.cluster().check(ClusterRerouteAction.NAME, request), is(false)); + assertThat(reportingUserRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(false)); + assertThat(reportingUserRole.cluster().check(MonitoringBulkAction.NAME, request), is(false)); assertThat(reportingUserRole.runAs().check(randomAlphaOfLengthBetween(1, 12)), is(false)); @@ -393,18 +449,20 @@ public void testReportingUserRole() { } public void testKibanaDashboardOnlyUserRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("kibana_dashboard_only_user"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role dashboardsOnlyUserRole = Role.builder(roleDescriptor, null).build(); - assertThat(dashboardsOnlyUserRole.cluster().check(ClusterHealthAction.NAME), is(false)); - assertThat(dashboardsOnlyUserRole.cluster().check(ClusterStateAction.NAME), is(false)); - assertThat(dashboardsOnlyUserRole.cluster().check(ClusterStatsAction.NAME), is(false)); - assertThat(dashboardsOnlyUserRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); - assertThat(dashboardsOnlyUserRole.cluster().check(ClusterRerouteAction.NAME), is(false)); - 
assertThat(dashboardsOnlyUserRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); - assertThat(dashboardsOnlyUserRole.cluster().check(MonitoringBulkAction.NAME), is(false)); + assertThat(dashboardsOnlyUserRole.cluster().check(ClusterHealthAction.NAME, request), is(false)); + assertThat(dashboardsOnlyUserRole.cluster().check(ClusterStateAction.NAME, request), is(false)); + assertThat(dashboardsOnlyUserRole.cluster().check(ClusterStatsAction.NAME, request), is(false)); + assertThat(dashboardsOnlyUserRole.cluster().check(PutIndexTemplateAction.NAME, request), is(false)); + assertThat(dashboardsOnlyUserRole.cluster().check(ClusterRerouteAction.NAME, request), is(false)); + assertThat(dashboardsOnlyUserRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(false)); + assertThat(dashboardsOnlyUserRole.cluster().check(MonitoringBulkAction.NAME, request), is(false)); assertThat(dashboardsOnlyUserRole.runAs().check(randomAlphaOfLengthBetween(1, 12)), is(false)); @@ -420,20 +478,35 @@ public void testKibanaDashboardOnlyUserRole() { assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(index), is(true)); assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(MultiSearchAction.NAME).test(index), is(true)); + + final String randomApplication = "kibana-" + randomAlphaOfLengthBetween(8, 24); + assertThat(dashboardsOnlyUserRole.application().grants(new ApplicationPrivilege(randomApplication, "app-random", "all"), "*"), + is(false)); + + final String application = "kibana-.kibana"; + assertThat(dashboardsOnlyUserRole.application().grants(new ApplicationPrivilege(application, "app-foo", "foo"), "*"), is(false)); + assertThat(dashboardsOnlyUserRole.application().grants(new ApplicationPrivilege(application, "app-all", "all"), "*"), is(false)); + assertThat(dashboardsOnlyUserRole.application().grants(new ApplicationPrivilege(application, "app-read", "read"), "*"), is(true)); + + final String applicationWithRandomIndex = "kibana-.kibana_" + randomAlphaOfLengthBetween(8, 24); + assertThat(dashboardsOnlyUserRole.application().grants( + new ApplicationPrivilege(applicationWithRandomIndex, "app-random-index", "all"), "*"), is(false)); } public void testSuperuserRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("superuser"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role superuserRole = Role.builder(roleDescriptor, null).build(); - assertThat(superuserRole.cluster().check(ClusterHealthAction.NAME), is(true)); - assertThat(superuserRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(true)); - assertThat(superuserRole.cluster().check(PutUserAction.NAME), is(true)); - assertThat(superuserRole.cluster().check(PutRoleAction.NAME), is(true)); - assertThat(superuserRole.cluster().check(PutIndexTemplateAction.NAME), is(true)); - assertThat(superuserRole.cluster().check("internal:admin/foo"), is(false)); + assertThat(superuserRole.cluster().check(ClusterHealthAction.NAME, request), is(true)); + assertThat(superuserRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(true)); + assertThat(superuserRole.cluster().check(PutUserAction.NAME, request), is(true)); + assertThat(superuserRole.cluster().check(PutRoleAction.NAME, request), is(true)); + 
assertThat(superuserRole.cluster().check(PutIndexTemplateAction.NAME, request), is(true)); + assertThat(superuserRole.cluster().check("internal:admin/foo", request), is(false)); final Settings indexSettings = Settings.builder().put("index.version.created", Version.CURRENT).build(); final MetaData metaData = new MetaData.Builder() @@ -472,18 +545,20 @@ public void testSuperuserRole() { } public void testLogstashSystemRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("logstash_system"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role logstashSystemRole = Role.builder(roleDescriptor, null).build(); - assertThat(logstashSystemRole.cluster().check(ClusterHealthAction.NAME), is(true)); - assertThat(logstashSystemRole.cluster().check(ClusterStateAction.NAME), is(true)); - assertThat(logstashSystemRole.cluster().check(ClusterStatsAction.NAME), is(true)); - assertThat(logstashSystemRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); - assertThat(logstashSystemRole.cluster().check(ClusterRerouteAction.NAME), is(false)); - assertThat(logstashSystemRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); - assertThat(logstashSystemRole.cluster().check(MonitoringBulkAction.NAME), is(true)); + assertThat(logstashSystemRole.cluster().check(ClusterHealthAction.NAME, request), is(true)); + assertThat(logstashSystemRole.cluster().check(ClusterStateAction.NAME, request), is(true)); + assertThat(logstashSystemRole.cluster().check(ClusterStatsAction.NAME, request), is(true)); + assertThat(logstashSystemRole.cluster().check(PutIndexTemplateAction.NAME, request), is(false)); + assertThat(logstashSystemRole.cluster().check(ClusterRerouteAction.NAME, request), is(false)); + assertThat(logstashSystemRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(false)); + assertThat(logstashSystemRole.cluster().check(MonitoringBulkAction.NAME, request), is(true)); assertThat(logstashSystemRole.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); @@ -494,18 +569,21 @@ public void testLogstashSystemRole() { } public void testBeatsAdminRole() { + final TransportRequest request = mock(TransportRequest.class); + final RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("beats_admin"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + final Role beatsAdminRole = Role.builder(roleDescriptor, null).build(); - assertThat(beatsAdminRole.cluster().check(ClusterHealthAction.NAME), is(false)); - assertThat(beatsAdminRole.cluster().check(ClusterStateAction.NAME), is(false)); - assertThat(beatsAdminRole.cluster().check(ClusterStatsAction.NAME), is(false)); - assertThat(beatsAdminRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); - assertThat(beatsAdminRole.cluster().check(ClusterRerouteAction.NAME), is(false)); - assertThat(beatsAdminRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); - assertThat(beatsAdminRole.cluster().check(MonitoringBulkAction.NAME), is(false)); + assertThat(beatsAdminRole.cluster().check(ClusterHealthAction.NAME, request), is(false)); + assertThat(beatsAdminRole.cluster().check(ClusterStateAction.NAME, request), is(false)); + assertThat(beatsAdminRole.cluster().check(ClusterStatsAction.NAME, request), is(false)); + assertThat(beatsAdminRole.cluster().check(PutIndexTemplateAction.NAME, request), is(false)); + 
assertThat(beatsAdminRole.cluster().check(ClusterRerouteAction.NAME, request), is(false)); + assertThat(beatsAdminRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(false)); + assertThat(beatsAdminRole.cluster().check(MonitoringBulkAction.NAME, request), is(false)); assertThat(beatsAdminRole.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); @@ -527,18 +605,20 @@ public void testBeatsAdminRole() { } public void testBeatsSystemRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor(BeatsSystemUser.ROLE_NAME); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role logstashSystemRole = Role.builder(roleDescriptor, null).build(); - assertThat(logstashSystemRole.cluster().check(ClusterHealthAction.NAME), is(true)); - assertThat(logstashSystemRole.cluster().check(ClusterStateAction.NAME), is(true)); - assertThat(logstashSystemRole.cluster().check(ClusterStatsAction.NAME), is(true)); - assertThat(logstashSystemRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); - assertThat(logstashSystemRole.cluster().check(ClusterRerouteAction.NAME), is(false)); - assertThat(logstashSystemRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); - assertThat(logstashSystemRole.cluster().check(MonitoringBulkAction.NAME), is(true)); + assertThat(logstashSystemRole.cluster().check(ClusterHealthAction.NAME, request), is(true)); + assertThat(logstashSystemRole.cluster().check(ClusterStateAction.NAME, request), is(true)); + assertThat(logstashSystemRole.cluster().check(ClusterStatsAction.NAME, request), is(true)); + assertThat(logstashSystemRole.cluster().check(PutIndexTemplateAction.NAME, request), is(false)); + assertThat(logstashSystemRole.cluster().check(ClusterRerouteAction.NAME, request), is(false)); + assertThat(logstashSystemRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(false)); + assertThat(logstashSystemRole.cluster().check(MonitoringBulkAction.NAME, request), is(true)); assertThat(logstashSystemRole.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); @@ -549,46 +629,48 @@ public void testBeatsSystemRole() { } public void testMachineLearningAdminRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("machine_learning_admin"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role role = Role.builder(roleDescriptor, null).build(); - assertThat(role.cluster().check(CloseJobAction.NAME), is(true)); - assertThat(role.cluster().check(DeleteDatafeedAction.NAME), is(true)); - assertThat(role.cluster().check(DeleteExpiredDataAction.NAME), is(true)); - assertThat(role.cluster().check(DeleteFilterAction.NAME), is(true)); - assertThat(role.cluster().check(DeleteJobAction.NAME), is(true)); - assertThat(role.cluster().check(DeleteModelSnapshotAction.NAME), is(true)); - assertThat(role.cluster().check(FinalizeJobExecutionAction.NAME), is(false)); // internal use only - assertThat(role.cluster().check(FlushJobAction.NAME), is(true)); - assertThat(role.cluster().check(GetBucketsAction.NAME), is(true)); - assertThat(role.cluster().check(GetCategoriesAction.NAME), is(true)); - assertThat(role.cluster().check(GetDatafeedsAction.NAME), is(true)); - assertThat(role.cluster().check(GetDatafeedsStatsAction.NAME), is(true)); - 
assertThat(role.cluster().check(GetFiltersAction.NAME), is(true)); - assertThat(role.cluster().check(GetInfluencersAction.NAME), is(true)); - assertThat(role.cluster().check(GetJobsAction.NAME), is(true)); - assertThat(role.cluster().check(GetJobsStatsAction.NAME), is(true)); - assertThat(role.cluster().check(GetModelSnapshotsAction.NAME), is(true)); - assertThat(role.cluster().check(GetRecordsAction.NAME), is(true)); - assertThat(role.cluster().check(IsolateDatafeedAction.NAME), is(false)); // internal use only - assertThat(role.cluster().check(KillProcessAction.NAME), is(false)); // internal use only - assertThat(role.cluster().check(OpenJobAction.NAME), is(true)); - assertThat(role.cluster().check(PostDataAction.NAME), is(true)); - assertThat(role.cluster().check(PreviewDatafeedAction.NAME), is(true)); - assertThat(role.cluster().check(PutDatafeedAction.NAME), is(true)); - assertThat(role.cluster().check(PutFilterAction.NAME), is(true)); - assertThat(role.cluster().check(PutJobAction.NAME), is(true)); - assertThat(role.cluster().check(RevertModelSnapshotAction.NAME), is(true)); - assertThat(role.cluster().check(StartDatafeedAction.NAME), is(true)); - assertThat(role.cluster().check(StopDatafeedAction.NAME), is(true)); - assertThat(role.cluster().check(UpdateDatafeedAction.NAME), is(true)); - assertThat(role.cluster().check(UpdateJobAction.NAME), is(true)); - assertThat(role.cluster().check(UpdateModelSnapshotAction.NAME), is(true)); - assertThat(role.cluster().check(UpdateProcessAction.NAME), is(false)); // internal use only - assertThat(role.cluster().check(ValidateDetectorAction.NAME), is(true)); - assertThat(role.cluster().check(ValidateJobConfigAction.NAME), is(true)); + assertThat(role.cluster().check(CloseJobAction.NAME, request), is(true)); + assertThat(role.cluster().check(DeleteDatafeedAction.NAME, request), is(true)); + assertThat(role.cluster().check(DeleteExpiredDataAction.NAME, request), is(true)); + assertThat(role.cluster().check(DeleteFilterAction.NAME, request), is(true)); + assertThat(role.cluster().check(DeleteJobAction.NAME, request), is(true)); + assertThat(role.cluster().check(DeleteModelSnapshotAction.NAME, request), is(true)); + assertThat(role.cluster().check(FinalizeJobExecutionAction.NAME, request), is(false)); // internal use only + assertThat(role.cluster().check(FlushJobAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetBucketsAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetCategoriesAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetDatafeedsAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetDatafeedsStatsAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetFiltersAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetInfluencersAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetJobsAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetJobsStatsAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetModelSnapshotsAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetRecordsAction.NAME, request), is(true)); + assertThat(role.cluster().check(IsolateDatafeedAction.NAME, request), is(false)); // internal use only + assertThat(role.cluster().check(KillProcessAction.NAME, request), is(false)); // internal use only + assertThat(role.cluster().check(OpenJobAction.NAME, request), is(true)); + assertThat(role.cluster().check(PostDataAction.NAME, request), is(true)); + 
assertThat(role.cluster().check(PreviewDatafeedAction.NAME, request), is(true)); + assertThat(role.cluster().check(PutDatafeedAction.NAME, request), is(true)); + assertThat(role.cluster().check(PutFilterAction.NAME, request), is(true)); + assertThat(role.cluster().check(PutJobAction.NAME, request), is(true)); + assertThat(role.cluster().check(RevertModelSnapshotAction.NAME, request), is(true)); + assertThat(role.cluster().check(StartDatafeedAction.NAME, request), is(true)); + assertThat(role.cluster().check(StopDatafeedAction.NAME, request), is(true)); + assertThat(role.cluster().check(UpdateDatafeedAction.NAME, request), is(true)); + assertThat(role.cluster().check(UpdateJobAction.NAME, request), is(true)); + assertThat(role.cluster().check(UpdateModelSnapshotAction.NAME, request), is(true)); + assertThat(role.cluster().check(UpdateProcessAction.NAME, request), is(false)); // internal use only + assertThat(role.cluster().check(ValidateDetectorAction.NAME, request), is(true)); + assertThat(role.cluster().check(ValidateJobConfigAction.NAME, request), is(true)); assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); assertNoAccessAllowed(role, "foo"); @@ -599,46 +681,48 @@ public void testMachineLearningAdminRole() { } public void testMachineLearningUserRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("machine_learning_user"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role role = Role.builder(roleDescriptor, null).build(); - assertThat(role.cluster().check(CloseJobAction.NAME), is(false)); - assertThat(role.cluster().check(DeleteDatafeedAction.NAME), is(false)); - assertThat(role.cluster().check(DeleteExpiredDataAction.NAME), is(false)); - assertThat(role.cluster().check(DeleteFilterAction.NAME), is(false)); - assertThat(role.cluster().check(DeleteJobAction.NAME), is(false)); - assertThat(role.cluster().check(DeleteModelSnapshotAction.NAME), is(false)); - assertThat(role.cluster().check(FinalizeJobExecutionAction.NAME), is(false)); - assertThat(role.cluster().check(FlushJobAction.NAME), is(false)); - assertThat(role.cluster().check(GetBucketsAction.NAME), is(true)); - assertThat(role.cluster().check(GetCategoriesAction.NAME), is(true)); - assertThat(role.cluster().check(GetDatafeedsAction.NAME), is(true)); - assertThat(role.cluster().check(GetDatafeedsStatsAction.NAME), is(true)); - assertThat(role.cluster().check(GetFiltersAction.NAME), is(false)); - assertThat(role.cluster().check(GetInfluencersAction.NAME), is(true)); - assertThat(role.cluster().check(GetJobsAction.NAME), is(true)); - assertThat(role.cluster().check(GetJobsStatsAction.NAME), is(true)); - assertThat(role.cluster().check(GetModelSnapshotsAction.NAME), is(true)); - assertThat(role.cluster().check(GetRecordsAction.NAME), is(true)); - assertThat(role.cluster().check(IsolateDatafeedAction.NAME), is(false)); - assertThat(role.cluster().check(KillProcessAction.NAME), is(false)); - assertThat(role.cluster().check(OpenJobAction.NAME), is(false)); - assertThat(role.cluster().check(PostDataAction.NAME), is(false)); - assertThat(role.cluster().check(PreviewDatafeedAction.NAME), is(false)); - assertThat(role.cluster().check(PutDatafeedAction.NAME), is(false)); - assertThat(role.cluster().check(PutFilterAction.NAME), is(false)); - assertThat(role.cluster().check(PutJobAction.NAME), is(false)); - 
assertThat(role.cluster().check(RevertModelSnapshotAction.NAME), is(false)); - assertThat(role.cluster().check(StartDatafeedAction.NAME), is(false)); - assertThat(role.cluster().check(StopDatafeedAction.NAME), is(false)); - assertThat(role.cluster().check(UpdateDatafeedAction.NAME), is(false)); - assertThat(role.cluster().check(UpdateJobAction.NAME), is(false)); - assertThat(role.cluster().check(UpdateModelSnapshotAction.NAME), is(false)); - assertThat(role.cluster().check(UpdateProcessAction.NAME), is(false)); - assertThat(role.cluster().check(ValidateDetectorAction.NAME), is(false)); - assertThat(role.cluster().check(ValidateJobConfigAction.NAME), is(false)); + assertThat(role.cluster().check(CloseJobAction.NAME, request), is(false)); + assertThat(role.cluster().check(DeleteDatafeedAction.NAME, request), is(false)); + assertThat(role.cluster().check(DeleteExpiredDataAction.NAME, request), is(false)); + assertThat(role.cluster().check(DeleteFilterAction.NAME, request), is(false)); + assertThat(role.cluster().check(DeleteJobAction.NAME, request), is(false)); + assertThat(role.cluster().check(DeleteModelSnapshotAction.NAME, request), is(false)); + assertThat(role.cluster().check(FinalizeJobExecutionAction.NAME, request), is(false)); + assertThat(role.cluster().check(FlushJobAction.NAME, request), is(false)); + assertThat(role.cluster().check(GetBucketsAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetCategoriesAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetDatafeedsAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetDatafeedsStatsAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetFiltersAction.NAME, request), is(false)); + assertThat(role.cluster().check(GetInfluencersAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetJobsAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetJobsStatsAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetModelSnapshotsAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetRecordsAction.NAME, request), is(true)); + assertThat(role.cluster().check(IsolateDatafeedAction.NAME, request), is(false)); + assertThat(role.cluster().check(KillProcessAction.NAME, request), is(false)); + assertThat(role.cluster().check(OpenJobAction.NAME, request), is(false)); + assertThat(role.cluster().check(PostDataAction.NAME, request), is(false)); + assertThat(role.cluster().check(PreviewDatafeedAction.NAME, request), is(false)); + assertThat(role.cluster().check(PutDatafeedAction.NAME, request), is(false)); + assertThat(role.cluster().check(PutFilterAction.NAME, request), is(false)); + assertThat(role.cluster().check(PutJobAction.NAME, request), is(false)); + assertThat(role.cluster().check(RevertModelSnapshotAction.NAME, request), is(false)); + assertThat(role.cluster().check(StartDatafeedAction.NAME, request), is(false)); + assertThat(role.cluster().check(StopDatafeedAction.NAME, request), is(false)); + assertThat(role.cluster().check(UpdateDatafeedAction.NAME, request), is(false)); + assertThat(role.cluster().check(UpdateJobAction.NAME, request), is(false)); + assertThat(role.cluster().check(UpdateModelSnapshotAction.NAME, request), is(false)); + assertThat(role.cluster().check(UpdateProcessAction.NAME, request), is(false)); + assertThat(role.cluster().check(ValidateDetectorAction.NAME, request), is(false)); + assertThat(role.cluster().check(ValidateJobConfigAction.NAME, request), is(false)); 
assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); assertNoAccessAllowed(role, "foo"); @@ -649,19 +733,21 @@ public void testMachineLearningUserRole() { } public void testWatcherAdminRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("watcher_admin"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role role = Role.builder(roleDescriptor, null).build(); - assertThat(role.cluster().check(PutWatchAction.NAME), is(true)); - assertThat(role.cluster().check(GetWatchAction.NAME), is(true)); - assertThat(role.cluster().check(DeleteWatchAction.NAME), is(true)); - assertThat(role.cluster().check(ExecuteWatchAction.NAME), is(true)); - assertThat(role.cluster().check(AckWatchAction.NAME), is(true)); - assertThat(role.cluster().check(ActivateWatchAction.NAME), is(true)); - assertThat(role.cluster().check(WatcherServiceAction.NAME), is(true)); - assertThat(role.cluster().check(WatcherStatsAction.NAME), is(true)); + assertThat(role.cluster().check(PutWatchAction.NAME, request), is(true)); + assertThat(role.cluster().check(GetWatchAction.NAME, request), is(true)); + assertThat(role.cluster().check(DeleteWatchAction.NAME, request), is(true)); + assertThat(role.cluster().check(ExecuteWatchAction.NAME, request), is(true)); + assertThat(role.cluster().check(AckWatchAction.NAME, request), is(true)); + assertThat(role.cluster().check(ActivateWatchAction.NAME, request), is(true)); + assertThat(role.cluster().check(WatcherServiceAction.NAME, request), is(true)); + assertThat(role.cluster().check(WatcherStatsAction.NAME, request), is(true)); assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); assertThat(role.indices().allowedIndicesMatcher(IndexAction.NAME).test("foo"), is(false)); @@ -674,19 +760,21 @@ public void testWatcherAdminRole() { } public void testWatcherUserRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("watcher_user"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role role = Role.builder(roleDescriptor, null).build(); - assertThat(role.cluster().check(PutWatchAction.NAME), is(false)); - assertThat(role.cluster().check(GetWatchAction.NAME), is(true)); - assertThat(role.cluster().check(DeleteWatchAction.NAME), is(false)); - assertThat(role.cluster().check(ExecuteWatchAction.NAME), is(false)); - assertThat(role.cluster().check(AckWatchAction.NAME), is(false)); - assertThat(role.cluster().check(ActivateWatchAction.NAME), is(false)); - assertThat(role.cluster().check(WatcherServiceAction.NAME), is(false)); - assertThat(role.cluster().check(WatcherStatsAction.NAME), is(true)); + assertThat(role.cluster().check(PutWatchAction.NAME, request), is(false)); + assertThat(role.cluster().check(GetWatchAction.NAME, request), is(true)); + assertThat(role.cluster().check(DeleteWatchAction.NAME, request), is(false)); + assertThat(role.cluster().check(ExecuteWatchAction.NAME, request), is(false)); + assertThat(role.cluster().check(AckWatchAction.NAME, request), is(false)); + assertThat(role.cluster().check(ActivateWatchAction.NAME, request), is(false)); + assertThat(role.cluster().check(WatcherServiceAction.NAME, request), is(false)); + assertThat(role.cluster().check(WatcherStatsAction.NAME, request), is(true)); 
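Reviewer note: the mechanical change running through all of these role tests is that the cluster-permission check now takes the TransportRequest alongside the action name, which is what lets a request-aware (conditional) privilege such as ManageApplicationPrivileges match on the contents of the request. A minimal sketch of the new calling pattern, assuming the same Mockito mock and Hamcrest matchers already imported in this test class:

```java
// A plain mocked request is enough for privileges that ignore the request;
// request-aware privileges need a concrete request (e.g. PutPrivilegesRequest).
final TransportRequest request = mock(TransportRequest.class);
assertThat(role.cluster().check(GetWatchAction.NAME, request), is(true));
assertThat(role.cluster().check(PutWatchAction.NAME, request), is(false));
```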
assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); assertThat(role.indices().allowedIndicesMatcher(IndexAction.NAME).test("foo"), is(false)); @@ -724,15 +812,17 @@ private void assertNoAccessAllowed(Role role, String index) { } public void testLogstashAdminRole() { + final TransportRequest request = mock(TransportRequest.class); + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("logstash_admin"); assertNotNull(roleDescriptor); assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); Role logstashAdminRole = Role.builder(roleDescriptor, null).build(); - assertThat(logstashAdminRole.cluster().check(ClusterHealthAction.NAME), is(false)); - assertThat(logstashAdminRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); - assertThat(logstashAdminRole.cluster().check(ClusterRerouteAction.NAME), is(false)); - assertThat(logstashAdminRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); + assertThat(logstashAdminRole.cluster().check(ClusterHealthAction.NAME, request), is(false)); + assertThat(logstashAdminRole.cluster().check(PutIndexTemplateAction.NAME, request), is(false)); + assertThat(logstashAdminRole.cluster().check(ClusterRerouteAction.NAME, request), is(false)); + assertThat(logstashAdminRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(false)); assertThat(logstashAdminRole.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java index 92c3b1d77133e..72c988fc22710 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java @@ -8,8 +8,11 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -113,4 +116,39 @@ private void assertInvalidPattern(String text) { // expected } } + + public void testLotsOfIndices() { + final int numberOfIndices = scaledRandomIntBetween(512, 1024); + final List<String> names = new ArrayList<>(numberOfIndices); + for (int i = 0; i < numberOfIndices; i++) { + names.add(randomAlphaOfLengthBetween(6, 48)); + } + final Automaton automaton = Automatons.patterns(names); + assertTrue(automaton.isDeterministic()); + + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); + for (String name : names) { + assertTrue(runAutomaton.run(name)); + } + } + + public void testSettingMaxDeterminizedStates() { + try { + assertNotEquals(10000, Automatons.getMaxDeterminizedStates()); + // set to the min value + Settings settings = Settings.builder().put(Automatons.MAX_DETERMINIZED_STATES_SETTING.getKey(), 10000).build(); + Automatons.updateMaxDeterminizedStates(settings); + assertEquals(10000, Automatons.getMaxDeterminizedStates()); + + final List<String> names = new ArrayList<>(1024); + for (int i = 0; i < 1024; i++) { + names.add(randomAlphaOfLength(48)); + } + TooComplexToDeterminizeException e = expectThrows(TooComplexToDeterminizeException.class, ()
-> Automatons.patterns(names)); + assertThat(e.getMaxDeterminizedStates(), equalTo(10000)); + } finally { + Automatons.updateMaxDeterminizedStates(Settings.EMPTY); + assertEquals(100000, Automatons.getMaxDeterminizedStates()); + } + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java index d8e0b693f7008..3e36550e46f2b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java @@ -78,6 +78,7 @@ public void cleanup() throws Exception { /** * Tests reloading a keystore that is used in the KeyManager of SSLContext */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32124") public void testReloadingKeyStore() throws Exception { assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); final Path tempDir = createTempDir(); @@ -191,6 +192,7 @@ public void testPEMKeyConfigReloading() throws Exception { * Tests the reloading of SSLContext when the trust store is modified. The same store is used as a TrustStore (for the * reloadable SSLContext used in the HTTPClient) and as a KeyStore for the MockWebServer */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32124") public void testReloadingTrustStore() throws Exception { assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path tempDir = createTempDir(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java index 0e3627d64ff6e..5e9fd4a386b64 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java @@ -10,6 +10,7 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; @@ -25,12 +26,10 @@ import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; -import static java.util.Collections.singletonMap; import static org.junit.Assert.assertEquals; public final class XPackRestTestHelper { @@ -47,8 +46,9 @@ public static void waitForMlTemplates(RestClient client) throws InterruptedExcep ESTestCase.awaitBusy(() -> { String response; try { - response = EntityUtils - .toString(client.performRequest("GET", "/_cat/nodes", singletonMap("h", "master,version")).getEntity()); + Request request = new Request("GET", "/_cat/nodes"); + request.addParameter("h", "master,version"); + response = EntityUtils.toString(client.performRequest(request).getEntity()); } catch (IOException e) { throw new RuntimeException(e); } @@ -67,7 +67,7 @@ public static void waitForMlTemplates(RestClient client) throws InterruptedExcep ESTestCase.awaitBusy(() -> { Map<String, Object> response; try { -
String string = EntityUtils.toString(client.performRequest("GET", "/_template/" + template).getEntity()); + String string = EntityUtils.toString(client.performRequest(new Request("GET", "/_template/" + template)).getEntity()); response = XContentHelper.convertToMap(JsonXContent.jsonXContent, string, false); } catch (ResponseException e) { if (e.getResponse().getStatusLine().getStatusCode() == 404) { @@ -89,8 +89,9 @@ public static void waitForMlTemplates(RestClient client) throws InterruptedExcep public static void waitForPendingTasks(RestClient adminClient) throws Exception { ESTestCase.assertBusy(() -> { try { - Response response = adminClient.performRequest("GET", "/_cat/tasks", - Collections.singletonMap("detailed", "true")); + Request request = new Request("GET", "/_cat/tasks"); + request.addParameter("detailed", "true"); + Response response = adminClient.performRequest(request); // Check to see if there are tasks still active. We exclude the // list tasks // actions tasks from this otherwise we will always fail diff --git a/x-pack/plugin/deprecation/build.gradle b/x-pack/plugin/deprecation/build.gradle index d89eb62e88492..3746287d615ff 100644 --- a/x-pack/plugin/deprecation/build.gradle +++ b/x-pack/plugin/deprecation/build.gradle @@ -10,7 +10,7 @@ esplugin { archivesBaseName = 'x-pack-deprecation' dependencies { - compileOnly "org.elasticsearch.plugin:x-pack-core:${version}" + compileOnly project(path: xpackModule('core'), configuration: 'shadow') } run { diff --git a/x-pack/plugin/graph/build.gradle b/x-pack/plugin/graph/build.gradle index 3ddd2ebee04a2..2b0f592b72040 100644 --- a/x-pack/plugin/graph/build.gradle +++ b/x-pack/plugin/graph/build.gradle @@ -10,7 +10,7 @@ esplugin { archivesBaseName = 'x-pack-graph' dependencies { - compileOnly "org.elasticsearch.plugin:x-pack-core:${version}" + compileOnly project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/plugin/logstash/build.gradle b/x-pack/plugin/logstash/build.gradle index 5a9b5d6cbe5ea..2e158a90ac7ab 100644 --- a/x-pack/plugin/logstash/build.gradle +++ b/x-pack/plugin/logstash/build.gradle @@ -10,7 +10,7 @@ esplugin { archivesBaseName = 'x-pack-logstash' dependencies { - compileOnly "org.elasticsearch.plugin:x-pack-core:${version}" + compileOnly project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 4c3cc9eef9313..a673016133724 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -40,7 +40,7 @@ compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try, compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked" dependencies { - compileOnly "org.elasticsearch.plugin:x-pack-core:${version}" + compileOnly project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') // This should not be here testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java index 05810b943befb..6fa47d1e45939 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java +++
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; import org.elasticsearch.xpack.core.ml.action.FinalizeJobExecutionAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -121,7 +122,7 @@ static void resolveAndValidateJobId(CloseJobAction.Request request, ClusterState private static void addJobAccordingToState(String jobId, PersistentTasksCustomMetaData tasksMetaData, List<String> openJobs, List<String> closingJobs, List<String> failedJobs) { - JobState jobState = MlMetadata.getJobState(jobId, tasksMetaData); + JobState jobState = MlTasks.getJobState(jobId, tasksMetaData); switch (jobState) { case CLOSING: closingJobs.add(jobId); @@ -143,7 +144,7 @@ static TransportCloseJobAction.WaitForCloseRequest buildWaitForCloseRequest(List TransportCloseJobAction.WaitForCloseRequest waitForCloseRequest = new TransportCloseJobAction.WaitForCloseRequest(); for (String jobId : openJobIds) { - PersistentTasksCustomMetaData.PersistentTask<?> jobTask = MlMetadata.getJobTask(jobId, tasks); + PersistentTasksCustomMetaData.PersistentTask<?> jobTask = MlTasks.getJobTask(jobId, tasks); if (jobTask != null) { auditor.info(jobId, Messages.JOB_AUDIT_CLOSING); waitForCloseRequest.persistentTaskIds.add(jobTask.getId()); @@ -151,7 +152,7 @@ static TransportCloseJobAction.WaitForCloseRequest buildWaitForCloseRequest(List } } for (String jobId : closingJobIds) { - PersistentTasksCustomMetaData.PersistentTask<?> jobTask = MlMetadata.getJobTask(jobId, tasks); + PersistentTasksCustomMetaData.PersistentTask<?> jobTask = MlTasks.getJobTask(jobId, tasks); if (jobTask != null) { waitForCloseRequest.persistentTaskIds.add(jobTask.getId()); } @@ -180,7 +181,7 @@ static void validateJobAndTaskState(String jobId, MlMetadata mlMetadata, Persist Optional<DatafeedConfig> datafeed = mlMetadata.getDatafeedByJobId(jobId); if (datafeed.isPresent()) { - DatafeedState datafeedState = MlMetadata.getDatafeedState(datafeed.get().getId(), tasks); + DatafeedState datafeedState = MlTasks.getDatafeedState(datafeed.get().getId(), tasks); if (datafeedState != DatafeedState.STOPPED) { throw ExceptionsHelper.conflictStatusException("cannot close job [{}], datafeed hasn't been stopped", jobId); } @@ -230,7 +231,7 @@ protected void doExecute(Task task, CloseJobAction.Request request, ActionListen Set<String> executorNodes = new HashSet<>(); PersistentTasksCustomMetaData tasks = state.metaData().custom(PersistentTasksCustomMetaData.TYPE); for (String resolvedJobId : request.getOpenJobIds()) { - PersistentTasksCustomMetaData.PersistentTask<?> jobTask = MlMetadata.getJobTask(resolvedJobId, tasks); + PersistentTasksCustomMetaData.PersistentTask<?> jobTask = MlTasks.getJobTask(resolvedJobId, tasks); if (jobTask == null || jobTask.isAssigned() == false) { String message = "Cannot close job [" + resolvedJobId + "] because the job does not have an assigned node."
+ " Use force close to close the job"; @@ -312,7 +313,7 @@ private void forceCloseJob(ClusterState currentState, CloseJobAction.Request req final AtomicArray failures = new AtomicArray<>(numberOfJobs); for (String jobId : jobIdsToForceClose) { - PersistentTasksCustomMetaData.PersistentTask jobTask = MlMetadata.getJobTask(jobId, tasks); + PersistentTasksCustomMetaData.PersistentTask jobTask = MlTasks.getJobTask(jobId, tasks); if (jobTask != null) { auditor.info(jobId, Messages.JOB_AUDIT_FORCE_CLOSING); persistentTasksService.sendRemoveRequest(jobTask.getId(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java index be7ac84284d74..2582cb0082e51 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java @@ -22,8 +22,8 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackPlugin; -import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.DeleteDatafeedAction; import org.elasticsearch.xpack.core.ml.action.IsolateDatafeedAction; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; @@ -86,7 +86,7 @@ private void forceDeleteDatafeed(DeleteDatafeedAction.Request request, ClusterSt private void removeDatafeedTask(DeleteDatafeedAction.Request request, ClusterState state, ActionListener listener) { PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - PersistentTasksCustomMetaData.PersistentTask datafeedTask = MlMetadata.getDatafeedTask(request.getDatafeedId(), tasks); + PersistentTasksCustomMetaData.PersistentTask datafeedTask = MlTasks.getDatafeedTask(request.getDatafeedId(), tasks); if (datafeedTask == null) { listener.onResponse(true); } else { @@ -128,7 +128,7 @@ public ClusterState execute(ClusterState currentState) { MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata) .removeDatafeed(request.getDatafeedId(), persistentTasks).build(); return ClusterState.builder(currentState).metaData( - MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, newMetadata).build()) + MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, newMetadata).build()) .build(); } }); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java index ede92fbbab950..ad0d4068592dc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java @@ -26,9 +26,9 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.DeleteJobAction; import 
org.elasticsearch.xpack.core.ml.action.KillProcessAction; import org.elasticsearch.xpack.core.ml.job.persistence.JobStorageDeletionTask; @@ -177,7 +177,7 @@ private void removePersistentTask(String jobId, ClusterState currentState, ActionListener<Boolean> listener) { PersistentTasksCustomMetaData tasks = currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - PersistentTasksCustomMetaData.PersistentTask<?> jobTask = MlMetadata.getJobTask(jobId, tasks); + PersistentTasksCustomMetaData.PersistentTask<?> jobTask = MlTasks.getJobTask(jobId, tasks); if (jobTask == null) { listener.onResponse(null); } else { @@ -251,7 +251,7 @@ static boolean jobIsDeletedFromState(String jobId, ClusterState clusterState) { private static ClusterState buildNewClusterState(ClusterState currentState, MlMetadata.Builder builder) { ClusterState.Builder newState = ClusterState.builder(currentState); - newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, builder.build()).build()); + newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, builder.build()).build()); return newState.build(); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java index 8e7a0fef41e89..ff611f9220250 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java @@ -21,7 +21,6 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.ml.action.FinalizeJobExecutionAction; -import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.job.config.Job; @@ -70,7 +69,7 @@ public ClusterState execute(ClusterState currentState) { } ClusterState.Builder builder = ClusterState.builder(currentState); return builder.metaData(new MetaData.Builder(currentState.metaData()) - .putCustom(MLMetadataField.TYPE, mlMetadataBuilder.build())) + .putCustom(MlMetadata.TYPE, mlMetadataBuilder.build())) .build(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsStatsAction.java index 41c8379c39fb8..1f7e55fc488a6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsStatsAction.java @@ -19,6 +19,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.GetDatafeedsStatsAction; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -69,8 +70,8 @@ protected void masterOperation(GetDatafeedsStatsAction.Request request, ClusterS private static GetDatafeedsStatsAction.Response.DatafeedStats getDatafeedStats(String datafeedId, ClusterState state, PersistentTasksCustomMetaData tasks) { - PersistentTasksCustomMetaData.PersistentTask<?>
task = MlMetadata.getDatafeedTask(datafeedId, tasks); - DatafeedState datafeedState = MlMetadata.getDatafeedState(datafeedId, tasks); + PersistentTasksCustomMetaData.PersistentTask task = MlTasks.getDatafeedTask(datafeedId, tasks); + DatafeedState datafeedState = MlTasks.getDatafeedState(datafeedId, tasks); DiscoveryNode node = null; String explanation = null; if (task != null) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java index 31f918dfc2571..28034d757dac2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.job.config.Job; @@ -102,9 +103,9 @@ protected void taskOperation(GetJobsStatsAction.Request request, TransportOpenJo PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); Optional> stats = processManager.getStatistics(task); if (stats.isPresent()) { - PersistentTasksCustomMetaData.PersistentTask pTask = MlMetadata.getJobTask(jobId, tasks); + PersistentTasksCustomMetaData.PersistentTask pTask = MlTasks.getJobTask(jobId, tasks); DiscoveryNode node = state.nodes().get(pTask.getExecutorNode()); - JobState jobState = MlMetadata.getJobState(jobId, tasks); + JobState jobState = MlTasks.getJobState(jobId, tasks); String assignmentExplanation = pTask.getAssignment().getExplanation(); TimeValue openTime = durationToTimeValue(processManager.jobOpenTime(task)); gatherForecastStats(jobId, forecastStats -> { @@ -137,8 +138,8 @@ void gatherStatsForClosedJobs(MlMetadata mlMetadata, GetJobsStatsAction.Request String jobId = jobIds.get(i); gatherForecastStats(jobId, forecastStats -> { gatherDataCountsAndModelSizeStats(jobId, (dataCounts, modelSizeStats) -> { - JobState jobState = MlMetadata.getJobState(jobId, tasks); - PersistentTasksCustomMetaData.PersistentTask pTask = MlMetadata.getJobTask(jobId, tasks); + JobState jobState = MlTasks.getJobState(jobId, tasks); + PersistentTasksCustomMetaData.PersistentTask pTask = MlTasks.getJobTask(jobId, tasks); String assignmentExplanation = null; if (pTask != null) { assignmentExplanation = pTask.getAssignment().getExplanation(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java index 398a1007ff9c4..3ca3c3154506a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import 
org.elasticsearch.xpack.core.ml.action.IsolateDatafeedAction; import org.elasticsearch.xpack.ml.MachineLearning; @@ -43,7 +43,7 @@ public TransportIsolateDatafeedAction(Settings settings, TransportService transp protected void doExecute(Task task, IsolateDatafeedAction.Request request, ActionListener listener) { final ClusterState state = clusterService.state(); PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - PersistentTasksCustomMetaData.PersistentTask datafeedTask = MlMetadata.getDatafeedTask(request.getDatafeedId(), tasks); + PersistentTasksCustomMetaData.PersistentTask datafeedTask = MlTasks.getDatafeedTask(request.getDatafeedId(), tasks); if (datafeedTask == null || datafeedTask.getExecutorNode() == null) { // No running datafeed task to isolate diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java index bd489588da3c3..07bb6152e8c1d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java @@ -18,7 +18,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.JobTaskRequest; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.job.JobManager; @@ -55,7 +55,7 @@ protected void doExecute(Task task, Request request, ActionListener li ClusterState state = clusterService.state(); JobManager.getJobOrThrowIfUnknown(jobId, state); PersistentTasksCustomMetaData tasks = clusterService.state().getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - PersistentTasksCustomMetaData.PersistentTask jobTask = MlMetadata.getJobTask(jobId, tasks); + PersistentTasksCustomMetaData.PersistentTask jobTask = MlTasks.getJobTask(jobId, tasks); if (jobTask == null || jobTask.isAssigned() == false) { String message = "Cannot perform requested action because job [" + jobId + "] is not open"; listener.onFailure(ExceptionsHelper.conflictStatusException(message)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java index 40cec95fae211..b40f0368a1554 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java @@ -18,7 +18,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.KillProcessAction; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -59,7 +59,7 @@ protected void taskOperation(KillProcessAction.Request request, TransportOpenJob protected void doExecute(Task task, KillProcessAction.Request request, ActionListener listener) { DiscoveryNodes 
nodes = clusterService.state().nodes(); PersistentTasksCustomMetaData tasks = clusterService.state().getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - PersistentTasksCustomMetaData.PersistentTask jobTask = MlMetadata.getJobTask(request.getJobId(), tasks); + PersistentTasksCustomMetaData.PersistentTask jobTask = MlTasks.getJobTask(request.getJobId(), tasks); if (jobTask == null || jobTask.getExecutorNode() == null) { logger.debug("[{}] Cannot kill the process because job is not open", request.getJobId()); listener.onResponse(new KillProcessAction.Response(false)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index e82e53458cb32..d378b19aad57a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -51,6 +51,7 @@ import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; import org.elasticsearch.xpack.core.ml.action.PutJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; @@ -489,7 +490,7 @@ public void onFailure(Exception e) { // Step 4. Start job task ActionListener establishedMemoryUpdateListener = ActionListener.wrap( - response -> persistentTasksService.sendStartRequest(MlMetadata.jobTaskId(jobParams.getJobId()), + response -> persistentTasksService.sendStartRequest(MlTasks.jobTaskId(jobParams.getJobId()), OpenJobAction.TASK_NAME, jobParams, finalListener), listener::onFailure ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java index 926395d65132c..1b90d5cb04b3a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java @@ -19,7 +19,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.PersistJobAction; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; @@ -61,7 +61,7 @@ protected void doExecute(Task task, PersistJobAction.Request request, ActionList // TODO Remove this overridden method in 7.0.0 DiscoveryNodes nodes = clusterService.state().nodes(); PersistentTasksCustomMetaData tasks = clusterService.state().getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - PersistentTasksCustomMetaData.PersistentTask jobTask = MlMetadata.getJobTask(request.getJobId(), tasks); + PersistentTasksCustomMetaData.PersistentTask jobTask = MlTasks.getJobTask(request.getJobId(), tasks); if (jobTask == null || jobTask.getExecutorNode() == null) { logger.debug("[{}] Cannot persist the job because the job is not open", request.getJobId()); listener.onResponse(new PersistJobAction.Response(false)); diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java index 88c72578023f9..7a7deac0136a7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java @@ -30,7 +30,6 @@ import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.PutDatafeedAction; import org.elasticsearch.xpack.core.security.SecurityContext; @@ -93,6 +92,7 @@ protected void masterOperation(PutDatafeedAction.Request request, ClusterState s .indices(request.getDatafeed().getIndices().toArray(new String[0])) .privileges(SearchAction.NAME) .build()); + privRequest.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]); client.execute(HasPrivilegesAction.INSTANCE, privRequest, privResponseListener); } else { @@ -108,8 +108,8 @@ private void handlePrivsResponse(String username, PutDatafeedAction.Request requ } else { XContentBuilder builder = JsonXContent.contentBuilder(); builder.startObject(); - for (HasPrivilegesResponse.IndexPrivileges index : response.getIndexPrivileges()) { - builder.field(index.getIndex()); + for (HasPrivilegesResponse.ResourcePrivileges index : response.getIndexPrivileges()) { + builder.field(index.getResource()); builder.map(index.getPrivileges()); } builder.endObject(); @@ -149,7 +149,7 @@ private ClusterState putDatafeed(PutDatafeedAction.Request request, Map - persistentTasksService.sendStartRequest(MLMetadataField.datafeedTaskId(params.getDatafeedId()), + persistentTasksService.sendStartRequest(MlTasks.datafeedTaskId(params.getDatafeedId()), StartDatafeedAction.TASK_NAME, params, listener) , listener::onFailure)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java index cf7350a870e97..1cc6d97158959 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java @@ -25,8 +25,8 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState; @@ -79,7 +79,7 @@ static void resolveDataFeedIds(StopDatafeedAction.Request request, MlMetadata ml Set expandedDatafeedIds = mlMetadata.expandDatafeedIds(request.getDatafeedId(), request.allowNoDatafeeds()); for (String expandedDatafeedId : expandedDatafeedIds) { validateDatafeedTask(expandedDatafeedId, mlMetadata); - addDatafeedTaskIdAccordingToState(expandedDatafeedId, MlMetadata.getDatafeedState(expandedDatafeedId, tasks), + addDatafeedTaskIdAccordingToState(expandedDatafeedId, 
MlTasks.getDatafeedState(expandedDatafeedId, tasks), startedDatafeedIds, stoppingDatafeedIds); } } @@ -155,7 +155,7 @@ private void normalStopDatafeed(Task task, StopDatafeedAction.Request request, A List startedDatafeeds, List stoppingDatafeeds) { Set executorNodes = new HashSet<>(); for (String datafeedId : startedDatafeeds) { - PersistentTasksCustomMetaData.PersistentTask datafeedTask = MlMetadata.getDatafeedTask(datafeedId, tasks); + PersistentTasksCustomMetaData.PersistentTask datafeedTask = MlTasks.getDatafeedTask(datafeedId, tasks); if (datafeedTask == null || datafeedTask.isAssigned() == false) { String message = "Cannot stop datafeed [" + datafeedId + "] because the datafeed does not have an assigned node." + " Use force stop to stop the datafeed"; @@ -171,8 +171,8 @@ private void normalStopDatafeed(Task task, StopDatafeedAction.Request request, A // wait for started and stopping datafeeds // Map datafeedId -> datafeed task Id. List allDataFeedsToWaitFor = Stream.concat( - startedDatafeeds.stream().map(id -> MLMetadataField.datafeedTaskId(id)), - stoppingDatafeeds.stream().map(id -> MLMetadataField.datafeedTaskId(id))) + startedDatafeeds.stream().map(MlTasks::datafeedTaskId), + stoppingDatafeeds.stream().map(MlTasks::datafeedTaskId)) .collect(Collectors.toList()); ActionListener finalListener = ActionListener.wrap( @@ -188,7 +188,7 @@ private void forceStopDatafeed(final StopDatafeedAction.Request request, final A final AtomicArray failures = new AtomicArray<>(startedDatafeeds.size()); for (String datafeedId : startedDatafeeds) { - PersistentTasksCustomMetaData.PersistentTask datafeedTask = MlMetadata.getDatafeedTask(datafeedId, tasks); + PersistentTasksCustomMetaData.PersistentTask datafeedTask = MlTasks.getDatafeedTask(datafeedId, tasks); if (datafeedTask != null) { persistentTasksService.sendRemoveRequest(datafeedTask.getId(), new ActionListener>() { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java index 4e43cbb185330..8cf917c4405ea 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.PutDatafeedAction; import org.elasticsearch.xpack.core.ml.action.UpdateDatafeedAction; @@ -76,7 +75,7 @@ public ClusterState execute(ClusterState currentState) { .updateDatafeed(update, persistentTasks, headers).build(); updatedDatafeed = newMetadata.getDatafeed(update.getId()); return ClusterState.builder(currentState).metaData( - MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, newMetadata).build()).build(); + MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, newMetadata).build()).build(); } }); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java index 338c111401acf..3d4d66eba92a3 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java @@ -21,6 +21,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -257,7 +258,7 @@ private String getJobId(TransportStartDatafeedAction.DatafeedTask task) { } private JobState getJobState(PersistentTasksCustomMetaData tasks, TransportStartDatafeedAction.DatafeedTask datafeedTask) { - return MlMetadata.getJobState(getJobId(datafeedTask), tasks); + return MlTasks.getJobState(getJobId(datafeedTask), tasks); } private TimeValue computeNextDelay(long next) { @@ -385,7 +386,7 @@ private long executeRealTime() throws Exception { private void closeJob() { ClusterState clusterState = clusterService.state(); PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - JobState jobState = MlMetadata.getJobState(getJobId(), tasks); + JobState jobState = MlTasks.getJobState(getJobId(), tasks); if (jobState != JobState.OPENED) { logger.debug("[{}] No need to auto-close job as job state is [{}]", getJobId(), jobState); return; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java index bebf0f3935d92..a6be047648623 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; @@ -35,7 +36,7 @@ public DatafeedNodeSelector(ClusterState clusterState, IndexNameExpressionResolv MlMetadata mlMetadata = MlMetadata.getMlMetadata(clusterState); PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); this.datafeed = mlMetadata.getDatafeed(datafeedId); - this.jobTask = MlMetadata.getJobTask(datafeed.getJobId(), tasks); + this.jobTask = MlTasks.getJobTask(datafeed.getJobId(), tasks); this.clusterState = Objects.requireNonNull(clusterState); this.resolver = Objects.requireNonNull(resolver); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index f7fab029c8803..fcc9151b755a6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -30,9 +30,9 @@ import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.core.XPackPlugin; -import 
org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.DeleteJobAction; import org.elasticsearch.xpack.core.ml.action.PutJobAction; import org.elasticsearch.xpack.core.ml.action.RevertModelSnapshotAction; @@ -169,7 +169,7 @@ public QueryPage expandJobs(String expression, boolean allowNoJobs, Cluster public JobState getJobState(String jobId) { PersistentTasksCustomMetaData tasks = clusterService.state().getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - return MlMetadata.getJobState(jobId, tasks); + return MlTasks.getJobState(jobId, tasks); } /** @@ -411,7 +411,7 @@ private void auditJobUpdatedIfNotInternal(UpdateJobAction.Request request) { private boolean isJobOpen(ClusterState clusterState, String jobId) { PersistentTasksCustomMetaData persistentTasks = clusterState.metaData().custom(PersistentTasksCustomMetaData.TYPE); - JobState jobState = MlMetadata.getJobState(jobId, persistentTasks); + JobState jobState = MlTasks.getJobState(jobId, persistentTasks); return jobState == JobState.OPENED; } @@ -618,7 +618,7 @@ private static MlMetadata.Builder createMlMetadataBuilder(ClusterState currentSt private static ClusterState buildNewClusterState(ClusterState currentState, MlMetadata.Builder builder) { XPackPlugin.checkReadyForXPackCustomMetadata(currentState); ClusterState.Builder newState = ClusterState.builder(currentState); - newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, builder.build()).build()); + newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, builder.build()).build()); return newState.build(); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java index 5893a863fe38f..5278171d43868 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.xpack.core.XPackFeatureSet; import org.elasticsearch.xpack.core.XPackFeatureSet.Usage; import org.elasticsearch.xpack.core.XPackField; -import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MachineLearningFeatureSetUsage; import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.MlMetadata; @@ -274,7 +273,7 @@ private void givenJobs(List jobs, List openJobs = new ArrayList<>(); @@ -181,7 +181,7 @@ public void testResolve_givenJobIdFailed() { addJobTask("job_id_failed", null, JobState.FAILED, tasksBuilder); ClusterState cs1 = ClusterState.builder(new ClusterName("_name")).metaData(new MetaData.Builder() - .putCustom(MLMetadataField.TYPE, mlBuilder.build()).putCustom(PersistentTasksCustomMetaData.TYPE, + .putCustom(MlMetadata.TYPE, mlBuilder.build()).putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())).build(); List openJobs = new ArrayList<>(); @@ -216,7 +216,7 @@ public void testResolve_withSpecificJobIds() { // closed job has no task ClusterState cs1 = ClusterState.builder(new ClusterName("_name")) - .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, mlBuilder.build()) + .metaData(new 
MetaData.Builder().putCustom(MlMetadata.TYPE, mlBuilder.build()) .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())) .build(); @@ -256,7 +256,7 @@ public void testDoExecute_whenNothingToClose() { addJobTask("foo", null, JobState.CLOSED, tasksBuilder); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) - .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, mlBuilder.build()) + .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlBuilder.build()) .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())) .build(); @@ -311,9 +311,9 @@ public void testBuildWaitForCloseRequest() { public static void addTask(String datafeedId, long startTime, String nodeId, DatafeedState state, PersistentTasksCustomMetaData.Builder tasks) { - tasks.addTask(MLMetadataField.datafeedTaskId(datafeedId), StartDatafeedAction.TASK_NAME, + tasks.addTask(MlTasks.datafeedTaskId(datafeedId), StartDatafeedAction.TASK_NAME, new StartDatafeedAction.DatafeedParams(datafeedId, startTime), new Assignment(nodeId, "test assignment")); - tasks.updateTaskState(MLMetadataField.datafeedTaskId(datafeedId), state); + tasks.updateTaskState(MlTasks.datafeedTaskId(datafeedId), state); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobActionTests.java index 1bf40ac881ee6..7464348adb9aa 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobActionTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; @@ -21,7 +20,7 @@ public void testJobIsDeletedFromState() { MlMetadata mlMetadata = MlMetadata.EMPTY_METADATA; ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) - .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, mlMetadata)) + .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlMetadata)) .build(); assertTrue(TransportDeleteJobAction.jobIsDeletedFromState("job_id_1", clusterState)); @@ -30,7 +29,7 @@ public void testJobIsDeletedFromState() { mlBuilder.putJob(BaseMlIntegTestCase.createScheduledJob("job_id_1").build(new Date()), false); mlMetadata = mlBuilder.build(); clusterState = ClusterState.builder(new ClusterName("_name")) - .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, mlMetadata)) + .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlMetadata)) .build(); assertFalse(TransportDeleteJobAction.jobIsDeletedFromState("job_id_1", clusterState)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java index dd8ddf3aa62ad..02bfb1b326fd9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java @@ -32,9 +32,9 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.test.VersionUtils; -import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; @@ -317,7 +317,7 @@ public void testSelectLeastLoadedMlNode_maxConcurrentOpeningJobs() { assertTrue(result.getExplanation().contains("because node exceeds [2] the maximum number of jobs [2] in opening state")); tasksBuilder = PersistentTasksCustomMetaData.builder(tasks); - tasksBuilder.reassignTask(MlMetadata.jobTaskId("job_id6"), new Assignment("_node_id3", "test assignment")); + tasksBuilder.reassignTask(MlTasks.jobTaskId("job_id6"), new Assignment("_node_id3", "test assignment")); tasks = tasksBuilder.build(); csBuilder = ClusterState.builder(cs); @@ -328,7 +328,7 @@ public void testSelectLeastLoadedMlNode_maxConcurrentOpeningJobs() { assertTrue(result.getExplanation().contains("because node exceeds [2] the maximum number of jobs [2] in opening state")); tasksBuilder = PersistentTasksCustomMetaData.builder(tasks); - tasksBuilder.updateTaskState(MlMetadata.jobTaskId("job_id6"), null); + tasksBuilder.updateTaskState(MlTasks.jobTaskId("job_id6"), null); tasks = tasksBuilder.build(); csBuilder = ClusterState.builder(cs); @@ -354,7 +354,7 @@ public void testSelectLeastLoadedMlNode_concurrentOpeningJobsAndStaleFailedJob() PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id1", "_node_id1", JobState.fromString("failed"), tasksBuilder); // This will make the allocation stale for job_id1 - tasksBuilder.reassignTask(MlMetadata.jobTaskId("job_id1"), new Assignment("_node_id1", "test assignment")); + tasksBuilder.reassignTask(MlTasks.jobTaskId("job_id1"), new Assignment("_node_id1", "test assignment")); addJobTask("job_id2", "_node_id1", null, tasksBuilder); addJobTask("job_id3", "_node_id2", null, tasksBuilder); addJobTask("job_id4", "_node_id2", null, tasksBuilder); @@ -675,10 +675,10 @@ public void testNodeNameAndMlAttributes() { } public static void addJobTask(String jobId, String nodeId, JobState jobState, PersistentTasksCustomMetaData.Builder builder) { - builder.addTask(MlMetadata.jobTaskId(jobId), OpenJobAction.TASK_NAME, new OpenJobAction.JobParams(jobId), + builder.addTask(MlTasks.jobTaskId(jobId), OpenJobAction.TASK_NAME, new OpenJobAction.JobParams(jobId), new Assignment(nodeId, "test assignment")); if (jobState != null) { - builder.updateTaskState(MlMetadata.jobTaskId(jobId), new JobTaskState(jobState, builder.getLastAllocationId())); + builder.updateTaskState(MlTasks.jobTaskId(jobId), new JobTaskState(jobState, builder.getLastAllocationId())); } } @@ -717,7 +717,7 @@ private void addJobAndIndices(MetaData.Builder metaData, RoutingTable.Builder ro Job job = jobCreator.apply(jobId); mlMetadata.putJob(job, false); } - metaData.putCustom(MLMetadataField.TYPE, mlMetadata.build()); + metaData.putCustom(MlMetadata.TYPE, mlMetadata.build()); } private ClusterState getClusterStateWithMappingsWithMetaData(Map namesAndVersions) throws IOException { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedActionTests.java index 934642986de96..a15c0e97b97f1 
100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedActionTests.java @@ -7,8 +7,8 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -29,9 +29,9 @@ public class TransportStopDatafeedActionTests extends ESTestCase { public void testValidate() { PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); - tasksBuilder.addTask(MLMetadataField.datafeedTaskId("foo"), StartDatafeedAction.TASK_NAME, + tasksBuilder.addTask(MlTasks.datafeedTaskId("foo"), StartDatafeedAction.TASK_NAME, new StartDatafeedAction.DatafeedParams("foo", 0L), new PersistentTasksCustomMetaData.Assignment("node_id", "")); - tasksBuilder.updateTaskState(MLMetadataField.datafeedTaskId("foo"), DatafeedState.STARTED); + tasksBuilder.updateTaskState(MlTasks.datafeedTaskId("foo"), DatafeedState.STARTED); tasksBuilder.build(); Job job = createDatafeedJob().build(new Date()); @@ -118,9 +118,9 @@ public void testResolveDataFeedIds_GivenAll() { public static void addTask(String datafeedId, long startTime, String nodeId, DatafeedState state, PersistentTasksCustomMetaData.Builder taskBuilder) { - taskBuilder.addTask(MLMetadataField.datafeedTaskId(datafeedId), StartDatafeedAction.TASK_NAME, + taskBuilder.addTask(MlTasks.datafeedTaskId(datafeedId), StartDatafeedAction.TASK_NAME, new StartDatafeedAction.DatafeedParams(datafeedId, startTime), new PersistentTasksCustomMetaData.Assignment(nodeId, "test assignment")); - taskBuilder.updateTaskState(MLMetadataField.datafeedTaskId(datafeedId), state); + taskBuilder.updateTaskState(MlTasks.datafeedTaskId(datafeedId), state); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java index 6ce03d22b64f0..a9dec7c66d4b6 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction; @@ -93,7 +92,7 @@ public void setUpTests() { Collections.emptyMap(), Collections.emptySet(), Version.CURRENT)) .build(); ClusterState.Builder cs = ClusterState.builder(new ClusterName("cluster_name")) - .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, mlMetadata.build()) + .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlMetadata.build()) .putCustom(PersistentTasksCustomMetaData.TYPE, tasks)) .nodes(nodes); @@ -254,7 +253,7 @@ public void testDatafeedTaskWaitsUntilJobIsOpened() { PersistentTasksCustomMetaData.Builder tasksBuilder 
= PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENING, tasksBuilder); ClusterState.Builder cs = ClusterState.builder(clusterService.state()) - .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, MlMetadata.getMlMetadata(clusterService.state())) + .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, MlMetadata.getMlMetadata(clusterService.state())) .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); when(clusterService.state()).thenReturn(cs.build()); @@ -269,7 +268,7 @@ public void testDatafeedTaskWaitsUntilJobIsOpened() { addJobTask("job_id", "node_id", JobState.OPENING, tasksBuilder); addJobTask("another_job", "node_id", JobState.OPENED, tasksBuilder); ClusterState.Builder anotherJobCs = ClusterState.builder(clusterService.state()) - .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, MlMetadata.getMlMetadata(clusterService.state())) + .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, MlMetadata.getMlMetadata(clusterService.state())) .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); capturedClusterStateListener.getValue().clusterChanged(new ClusterChangedEvent("_source", anotherJobCs.build(), cs.build())); @@ -280,7 +279,7 @@ public void testDatafeedTaskWaitsUntilJobIsOpened() { tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENED, tasksBuilder); ClusterState.Builder jobOpenedCs = ClusterState.builder(clusterService.state()) - .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, MlMetadata.getMlMetadata(clusterService.state())) + .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, MlMetadata.getMlMetadata(clusterService.state())) .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); capturedClusterStateListener.getValue().clusterChanged( @@ -294,7 +293,7 @@ public void testDatafeedTaskStopsBecauseJobFailedWhileOpening() { PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENING, tasksBuilder); ClusterState.Builder cs = ClusterState.builder(clusterService.state()) - .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, MlMetadata.getMlMetadata(clusterService.state())) + .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, MlMetadata.getMlMetadata(clusterService.state())) .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); when(clusterService.state()).thenReturn(cs.build()); @@ -308,7 +307,7 @@ public void testDatafeedTaskStopsBecauseJobFailedWhileOpening() { tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.FAILED, tasksBuilder); ClusterState.Builder updatedCs = ClusterState.builder(clusterService.state()) - .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, MlMetadata.getMlMetadata(clusterService.state())) + .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, MlMetadata.getMlMetadata(clusterService.state())) .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); capturedClusterStateListener.getValue().clusterChanged(new ClusterChangedEvent("_source", updatedCs.build(), cs.build())); @@ -322,7 +321,7 @@ public void testDatafeedGetsStoppedWhileWaitingForJobToOpen() { PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENING, tasksBuilder); ClusterState.Builder cs = 
ClusterState.builder(clusterService.state()) - .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, MlMetadata.getMlMetadata(clusterService.state())) + .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, MlMetadata.getMlMetadata(clusterService.state())) .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); when(clusterService.state()).thenReturn(cs.build()); @@ -340,7 +339,7 @@ public void testDatafeedGetsStoppedWhileWaitingForJobToOpen() { tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENED, tasksBuilder); ClusterState.Builder updatedCs = ClusterState.builder(clusterService.state()) - .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, MlMetadata.getMlMetadata(clusterService.state())) + .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, MlMetadata.getMlMetadata(clusterService.state())) .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); capturedClusterStateListener.getValue().clusterChanged(new ClusterChangedEvent("_source", cs.build(), updatedCs.build())); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java index f3fa804bb27b9..3a6082c6cf057 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java @@ -27,8 +27,8 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; @@ -257,7 +257,7 @@ public void testSelectNode_jobTaskStale() { PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask(job.getId(), nodeId, JobState.OPENED, tasksBuilder); // Set to lower allocationId, so job task is stale: - tasksBuilder.updateTaskState(MlMetadata.jobTaskId(job.getId()), new JobTaskState(JobState.OPENED, 0)); + tasksBuilder.updateTaskState(MlTasks.jobTaskId(job.getId()), new JobTaskState(JobState.OPENED, 0)); tasks = tasksBuilder.build(); givenClusterState("foo", 1, 0); @@ -319,7 +319,7 @@ private void givenClusterState(String index, int numberOfShards, int numberOfRep clusterState = ClusterState.builder(new ClusterName("cluster_name")) .metaData(new MetaData.Builder() - .putCustom(MLMetadataField.TYPE, mlMetadata) + .putCustom(MlMetadata.TYPE, mlMetadata) .putCustom(PersistentTasksCustomMetaData.TYPE, tasks) .put(indexMetaData, false)) .nodes(nodes) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java index 83facf4c1f2de..9ab4907b2cd17 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java @@ -23,7 +23,7 @@ import 
org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; import org.elasticsearch.xpack.core.ml.action.GetDatafeedsStatsAction; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; @@ -206,7 +206,7 @@ public void testDedicatedMlNode() throws Exception { assertBusy(() -> { ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - PersistentTask task = tasks.getTask(MlMetadata.jobTaskId(jobId)); + PersistentTask task = tasks.getTask(MlTasks.jobTaskId(jobId)); DiscoveryNode node = clusterState.nodes().resolveNode(task.getExecutorNode()); assertThat(node.getAttributes(), hasEntry(MachineLearning.ML_ENABLED_NODE_ATTR, "true")); @@ -390,7 +390,7 @@ private void assertJobTask(String jobId, JobState expectedState, boolean hasExec ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); assertEquals(1, tasks.taskMap().size()); - PersistentTask task = MlMetadata.getJobTask(jobId, tasks); + PersistentTask task = MlTasks.getJobTask(jobId, tasks); assertNotNull(task); if (hasExecutorNode) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java index 14ec4813a749e..f389bc4b74913 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java @@ -9,7 +9,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.DeleteJobAction; import org.elasticsearch.xpack.core.ml.action.PutJobAction; @@ -110,7 +109,7 @@ private ClusterState markJobAsDeleted(String jobId, ClusterState currentState) { builder.markJobAsDeleted(jobId, tasks, true); ClusterState.Builder newState = ClusterState.builder(currentState); - return newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, builder.build()).build()) + return newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, builder.build()).build()) .build(); } @@ -119,7 +118,7 @@ private ClusterState removeJobFromClusterState(String jobId, ClusterState curren builder.deleteJob(jobId, currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE)); ClusterState.Builder newState = ClusterState.builder(currentState); - return newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, builder.build()).build()) + return newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, builder.build()).build()) .build(); } } diff --git 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/TooManyJobsIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/TooManyJobsIT.java index 68ce5da5ca176..1b1c39c3bcf3c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/TooManyJobsIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/TooManyJobsIT.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; @@ -57,7 +57,7 @@ public void testCloseFailedJob() throws Exception { PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); assertEquals(1, tasks.taskMap().size()); // now just double check that the first job is still opened: - PersistentTasksCustomMetaData.PersistentTask task = tasks.getTask(MlMetadata.jobTaskId("close-failed-job-1")); + PersistentTasksCustomMetaData.PersistentTask task = tasks.getTask(MlTasks.jobTaskId("close-failed-job-1")); assertEquals(JobState.OPENED, ((JobTaskState) task.getState()).getState()); } @@ -121,7 +121,7 @@ private void verifyMaxNumberOfJobsLimit(int numNodes, int maxNumberOfJobsPerNode for (Client client : clients()) { PersistentTasksCustomMetaData tasks = client.admin().cluster().prepareState().get().getState() .getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - assertEquals(MlMetadata.getJobState(job.getId(), tasks), JobState.OPENED); + assertEquals(MlTasks.getJobState(job.getId(), tasks), JobState.OPENED); } }); return; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java index cf925963c198a..e15b5828df96a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.PutJobAction; @@ -94,7 +93,7 @@ public void testGetJobOrThrowIfUnknown_GivenKnownJob() { Job job = buildJobBuilder("foo").build(); MlMetadata mlMetadata = new MlMetadata.Builder().putJob(job, false).build(); ClusterState cs = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().putCustom(MLMetadataField.TYPE, mlMetadata)).build(); + .metaData(MetaData.builder().putCustom(MlMetadata.TYPE, mlMetadata)).build(); assertEquals(job, JobManager.getJobOrThrowIfUnknown("foo", cs)); } @@ -105,7 +104,7 @@ public void testExpandJobs_GivenAll() { mlMetadata.putJob(buildJobBuilder(Integer.toString(i)).build(), false); } ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().putCustom(MLMetadataField.TYPE, mlMetadata.build())).build(); + 
.metaData(MetaData.builder().putCustom(MlMetadata.TYPE, mlMetadata.build())).build(); JobManager jobManager = createJobManager(); QueryPage result = jobManager.expandJobs("_all", true, clusterState); @@ -161,7 +160,7 @@ public void testPutJob_ThrowsIfJobExists() throws IOException { MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); mlMetadata.putJob(buildJobBuilder("foo").build(), false); ClusterState clusterState = ClusterState.builder(new ClusterName("name")) - .metaData(MetaData.builder().putCustom(MLMetadataField.TYPE, mlMetadata.build())).build(); + .metaData(MetaData.builder().putCustom(MlMetadata.TYPE, mlMetadata.build())).build(); jobManager.putJob(putJobRequest, analysisRegistry, clusterState, new ActionListener() { @Override @@ -215,7 +214,7 @@ public void testNotifyFilterChanged() { ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) .metaData(MetaData.builder() .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build()) - .putCustom(MLMetadataField.TYPE, mlMetadata.build())) + .putCustom(MlMetadata.TYPE, mlMetadata.build())) .build(); when(clusterService.state()).thenReturn(clusterState); @@ -267,7 +266,7 @@ public void testNotifyFilterChangedGivenOnlyAddedItems() { ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) .metaData(MetaData.builder() - .putCustom(MLMetadataField.TYPE, mlMetadata.build())) + .putCustom(MlMetadata.TYPE, mlMetadata.build())) .build(); when(clusterService.state()).thenReturn(clusterState); @@ -297,7 +296,7 @@ public void testNotifyFilterChangedGivenOnlyRemovedItems() { ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) .metaData(MetaData.builder() - .putCustom(MLMetadataField.TYPE, mlMetadata.build())) + .putCustom(MlMetadata.TYPE, mlMetadata.build())) .build(); when(clusterService.state()).thenReturn(clusterState); @@ -331,7 +330,7 @@ public void testUpdateProcessOnCalendarChanged() { ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) .metaData(MetaData.builder() .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build()) - .putCustom(MLMetadataField.TYPE, mlMetadata.build())) + .putCustom(MlMetadata.TYPE, mlMetadata.build())) .build(); when(clusterService.state()).thenReturn(clusterState); @@ -370,7 +369,7 @@ public void testUpdateProcessOnCalendarChanged_GivenGroups() { ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) .metaData(MetaData.builder() .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build()) - .putCustom(MLMetadataField.TYPE, mlMetadata.build())) + .putCustom(MlMetadata.TYPE, mlMetadata.build())) .build(); when(clusterService.state()).thenReturn(clusterState); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java index ef87fe392dd75..e33dbc69db607 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java @@ -43,13 +43,13 @@ import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; -import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery; import 
org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.CategoryDefinition; import org.elasticsearch.xpack.core.ml.job.results.Influencer; import org.elasticsearch.xpack.core.ml.job.results.Result; +import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery; import org.mockito.ArgumentCaptor; import java.io.IOException; @@ -252,7 +252,7 @@ public void testBuckets_OneBucketNoInterim() throws IOException { BucketsQueryBuilder bq = new BucketsQueryBuilder().from(from).size(size).anomalyScoreThreshold(1.0); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.buckets(jobId, bq, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); QueryPage buckets = holder[0]; @@ -286,7 +286,7 @@ public void testBuckets_OneBucketInterim() throws IOException { BucketsQueryBuilder bq = new BucketsQueryBuilder().from(from).size(size).anomalyScoreThreshold(5.1) .includeInterim(true); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.buckets(jobId, bq, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); QueryPage buckets = holder[0]; @@ -322,7 +322,7 @@ public void testBuckets_UsingBuilder() throws IOException { bq.anomalyScoreThreshold(5.1); bq.includeInterim(true); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.buckets(jobId, bq, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); QueryPage buckets = holder[0]; @@ -368,7 +368,7 @@ public void testBucket_OneBucketNoExpand() throws IOException { BucketsQueryBuilder bq = new BucketsQueryBuilder(); bq.timestamp(Long.toString(now.getTime())); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] bucketHolder = new QueryPage[1]; provider.buckets(jobId, bq, q -> bucketHolder[0] = q, e -> {}, client); assertThat(bucketHolder[0].count(), equalTo(1L)); @@ -409,7 +409,7 @@ public void testRecords() throws IOException { .epochEnd(String.valueOf(now.getTime())).includeInterim(true).sortField(sortfield) .recordScore(2.2); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.records(jobId, rqb, page -> holder[0] = page, RuntimeException::new, client); QueryPage recordPage = holder[0]; @@ -462,7 +462,7 @@ public void testRecords_UsingBuilder() throws IOException { rqb.sortField(sortfield); rqb.recordScore(2.2); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.records(jobId, rqb, page -> holder[0] = page, RuntimeException::new, client); QueryPage recordPage = holder[0]; @@ -507,7 +507,7 @@ public void testBucketRecords() throws IOException { Client client = getMockedClient(qb -> {}, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.bucketRecords(jobId, bucket, from, size, true, sortfield, true, page -> holder[0] = page, RuntimeException::new, client); @@ -568,7 +568,7 @@ public void testCategoryDefinitions() throws IOException { Client client = 
getMockedClient(q -> {}, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.categoryDefinitions(jobId, null, false, from, size, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); @@ -590,7 +590,7 @@ public void testCategoryDefinition() throws IOException { SearchResponse response = createSearchResponse(Collections.singletonList(source)); Client client = getMockedClient(q -> {}, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.categoryDefinitions(jobId, categoryId, false, null, null, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); @@ -632,7 +632,7 @@ public void testInfluencers_NoInterim() throws IOException { Client client = getMockedClient(q -> qbHolder[0] = q, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; InfluencersQuery query = new InfluencersQueryBuilder().from(from).size(size).includeInterim(false).build(); provider.influencers(jobId, query, page -> holder[0] = page, RuntimeException::new, client); @@ -692,7 +692,7 @@ public void testInfluencers_WithInterim() throws IOException { Client client = getMockedClient(q -> qbHolder[0] = q, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; InfluencersQuery query = new InfluencersQueryBuilder().from(from).size(size).start("0").end("0").sortField("sort") .sortDescending(true).influencerScoreThreshold(0.0).includeInterim(true).build(); @@ -747,7 +747,7 @@ public void testModelSnapshots() throws IOException { Client client = getMockedClient(qb -> {}, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.modelSnapshots(jobId, from, size, r -> holder[0] = r, RuntimeException::new); QueryPage page = holder[0]; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java index f2c18ec9d5a4e..c31ebd4bc2c86 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java @@ -195,7 +195,7 @@ public void testBulkRequestExecutesWhenReachMaxDocs() { verifyNoMoreInteractions(client); } - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) private Client mockClient(ArgumentCaptor captor) { Client client = mock(Client.class); ThreadPool threadPool = mock(ThreadPool.class); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java index eedc42148b151..5f8b685f8442c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java +++ 
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java
index eedc42148b151..5f8b685f8442c 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java
@@ -65,7 +65,6 @@ private Bucket generateBucket(Date timestamp) throws IOException {
     }

     @Before
-    @SuppressWarnings("unchecked")
     public void setUpMocks() throws IOException {
         MockitoAnnotations.initMocks(this);

diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java
index 82441c424674d..4195e93439d60 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java
@@ -24,7 +24,6 @@
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.FixedExecutorBuilder;
 import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.xpack.core.ml.MLMetadataField;
 import org.elasticsearch.xpack.core.ml.MlMetadata;
 import org.elasticsearch.xpack.core.ml.action.DeleteModelSnapshotAction;
 import org.elasticsearch.xpack.core.ml.job.config.Job;
@@ -212,7 +211,7 @@ private void givenJobs(List<Job> jobs) {
         MlMetadata mlMetadata = mock(MlMetadata.class);
         when(mlMetadata.getJobs()).thenReturn(jobsMap);
         MetaData metadata = mock(MetaData.class);
-        when(metadata.custom(MLMetadataField.TYPE)).thenReturn(mlMetadata);
+        when(metadata.custom(MlMetadata.TYPE)).thenReturn(mlMetadata);
         when(clusterState.getMetaData()).thenReturn(metadata);
     }

diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java
index 2f08574e1a6da..af9ec8b84a6bd 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java
@@ -18,7 +18,6 @@
 import org.elasticsearch.mock.orig.Mockito;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.xpack.core.ml.MLMetadataField;
 import org.elasticsearch.xpack.core.ml.MlMetadata;
 import org.elasticsearch.xpack.core.ml.job.config.Job;
 import org.elasticsearch.xpack.core.ml.job.config.JobTests;
@@ -165,7 +164,7 @@ private void givenJobs(List<Job> jobs) {
         MlMetadata mlMetadata = mock(MlMetadata.class);
         when(mlMetadata.getJobs()).thenReturn(jobsMap);
         MetaData metadata = mock(MetaData.class);
-        when(metadata.custom(MLMetadataField.TYPE)).thenReturn(mlMetadata);
+        when(metadata.custom(MlMetadata.TYPE)).thenReturn(mlMetadata);
         when(clusterState.getMetaData()).thenReturn(metadata);
     }
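Both retention test fixtures stub the cluster state the same way: a mocked MetaData hands back the ML metadata when asked for its TYPE key, now read from MlMetadata directly. A stripped-down sketch of that keyed-stub pattern with Mockito (the MetaData interface and the "ml" key here are stand-ins, not the Elasticsearch classes):

```java
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class KeyedCustomStubSketch {
    // Stand-in for MetaData#custom(String), which looks plugin metadata
    // up by a well-known TYPE constant.
    interface MetaData {
        Object custom(String type);
    }

    public static void main(String[] args) {
        final String type = "ml"; // hypothetical key; the tests use MlMetadata.TYPE
        Object mlMetadata = new Object();

        MetaData metadata = mock(MetaData.class);
        // Stub only the lookup under the expected key, as givenJobs(...) does above;
        // any other key falls through to Mockito's default answer (null).
        when(metadata.custom(type)).thenReturn(mlMetadata);

        System.out.println(metadata.custom(type) == mlMetadata); // true
        System.out.println(metadata.custom("other"));            // null
    }
}
```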
diff --git a/x-pack/plugin/monitoring/build.gradle b/x-pack/plugin/monitoring/build.gradle
index 3fde6cd8c3775..a452ef09a20ff 100644
--- a/x-pack/plugin/monitoring/build.gradle
+++ b/x-pack/plugin/monitoring/build.gradle
@@ -13,7 +13,7 @@ esplugin {
 archivesBaseName = 'x-pack-monitoring'

 dependencies {
-  compileOnly "org.elasticsearch.plugin:x-pack-core:${version}"
+  compileOnly project(path: xpackModule('core'), configuration: 'shadow')
   testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')

   // monitoring deps
@@ -66,7 +66,7 @@ task internalClusterTest(type: RandomizedTestingTask,
   include '**/*IT.class'
   systemProperty 'es.set.netty.runtime.available.processors', 'false'
 }
-check.dependsOn internalClusterTest
+check.dependsOn internalClusterTest
+internalClusterTest.mustRunAfter test

 // also add an "alias" task to make typing on the command line easier
 task icTest {
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java
index 168ccff44c4bb..a804703a53046 100644
--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResource.java
@@ -9,6 +9,7 @@
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
+import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.client.RestClient;
@@ -311,13 +312,15 @@ protected Tuple<CheckResponse, Response> checkForResource(final RestClient clien
                                                               final Set<Integer> exists, final Set<Integer> doesNotExist) {
         logger.trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, resourceOwnerName, resourceOwnerType);
-        final Set<Integer> expectedResponseCodes = Sets.union(exists, doesNotExist);
+
+        final Request request = new Request("GET", resourceBasePath + "/" + resourceName);
+        addParameters(request);
         // avoid exists and DNE parameters from being an exception by default
-        final Map<String, String> getParameters = new HashMap<>(parameters);
-        getParameters.put("ignore", expectedResponseCodes.stream().map(i -> i.toString()).collect(Collectors.joining(",")));
+        final Set<Integer> expectedResponseCodes = Sets.union(exists, doesNotExist);
+        request.addParameter("ignore", expectedResponseCodes.stream().map(i -> i.toString()).collect(Collectors.joining(",")));

         try {
-            final Response response = client.performRequest("GET", resourceBasePath + "/" + resourceName, getParameters);
+            final Response response = client.performRequest(request);
             final int statusCode = response.getStatusLine().getStatusCode();

             // checking the content is the job of whoever called this function by checking the tuple's response
@@ -385,8 +388,12 @@ protected boolean putResource(final RestClient client, final Logger logger,

         boolean success = false;

+        final Request request = new Request("PUT", resourceBasePath + "/" + resourceName);
+        addParameters(request);
+        request.setEntity(body.get());
+
         try {
-            final Response response = client.performRequest("PUT", resourceBasePath + "/" + resourceName, parameters, body.get());
+            final Response response = client.performRequest(request);
             final int statusCode = response.getStatusLine().getStatusCode();

             // 200 or 201
@@ -431,12 +438,15 @@ protected boolean deleteResource(final RestClient client, final Logger logger,

         boolean success = false;

-        // avoid 404 being an exception by default
-        final Map<String, String> deleteParameters = new HashMap<>(parameters);
-        deleteParameters.putIfAbsent("ignore", Integer.toString(RestStatus.NOT_FOUND.getStatus()));
+        Request request = new Request("DELETE", resourceBasePath + "/" + resourceName);
+        addParameters(request);
+        if (false == parameters.containsKey("ignore")) {
+            // avoid 404 being an exception by default
+            request.addParameter("ignore", Integer.toString(RestStatus.NOT_FOUND.getStatus()));
+        }

         try {
-            final Response response = client.performRequest("DELETE", resourceBasePath + "/" + resourceName, deleteParameters);
+            final Response response = client.performRequest(request);
             final int statusCode = response.getStatusLine().getStatusCode();

             // 200 or 404 (not found is just as good as deleting it!)
@@ -498,4 +508,9 @@ protected boolean shouldReplaceResource(final Response response, final XContent
         return true;
     }

+    private void addParameters(Request request) {
+        for (Map.Entry<String, String> param : parameters.entrySet()) {
+            request.addParameter(param.getKey(), param.getValue());
+        }
+    }
 }
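The production change above is the heart of this patch: instead of passing method, endpoint, parameters, and entity to performRequest separately, callers now assemble an org.elasticsearch.client.Request first and hand over that single object. A sketch of the new call shape against the low-level REST client (the endpoint, body, and local host are assumptions for illustration):

```java
import org.apache.http.HttpHost;
import org.apache.http.entity.ContentType;
import org.apache.http.nio.entity.NStringEntity;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class RequestApiSketch {
    public static void main(String[] args) throws Exception {
        // Assumes a local node on 9200; adjust the host for your setup.
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Method and endpoint are fixed at construction...
            Request request = new Request("PUT", "/_template/example");
            // ...while parameters and the body are attached afterwards, replacing
            // the old performRequest(method, endpoint, parameters, entity) overloads.
            request.addParameter("ignore", "404");
            request.setEntity(new NStringEntity("{\"index_patterns\":[\"example-*\"]}", ContentType.APPLICATION_JSON));
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}
```

The "ignore" parameter keeps the expected status codes from surfacing as exceptions, which is exactly what checkForResource and deleteResource rely on above.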
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/VersionHttpResource.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/VersionHttpResource.java
index bbc86d377748f..eec9162e7edb0 100644
--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/VersionHttpResource.java
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/VersionHttpResource.java
@@ -9,6 +9,7 @@
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.Version;
+import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.common.logging.Loggers;
@@ -16,7 +17,6 @@
 import org.elasticsearch.common.xcontent.json.JsonXContent;

 import java.io.IOException;
-import java.util.Collections;
 import java.util.Map;
 import java.util.Objects;

@@ -27,11 +27,6 @@ public class VersionHttpResource extends HttpResource {

     private static final Logger logger = Loggers.getLogger(VersionHttpResource.class);

-    /**
-     * The parameters to pass with every version request to limit the output to just the version number.
-     */
-    public static final Map<String, String> PARAMETERS = Collections.singletonMap("filter_path", "version.number");
-
     /**
      * The minimum supported version of Elasticsearch.
      */
@@ -59,7 +54,9 @@ protected boolean doCheckAndPublish(final RestClient client) {
         logger.trace("checking [{}] to ensure that it supports the minimum version [{}]", resourceOwnerName, minimumVersion);

         try {
-            return validateVersion(client.performRequest("GET", "/", PARAMETERS));
+            Request request = new Request("GET", "/");
+            request.addParameter("filter_path", "version.number");
+            return validateVersion(client.performRequest(request));
         } catch (IOException | RuntimeException e) {
             logger.error(
                     (Supplier<?>)() ->
diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/AbstractPublishableHttpResourceTestCase.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/AbstractPublishableHttpResourceTestCase.java
index 208f878543f56..ef365042bd584 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/AbstractPublishableHttpResourceTestCase.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/AbstractPublishableHttpResourceTestCase.java
@@ -11,6 +11,7 @@
 import org.apache.http.StatusLine;
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
+import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.client.RestClient;
@@ -20,6 +21,8 @@
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.CheckResponse;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mockito;

 import java.io.IOException;
 import java.util.Map;
@@ -30,10 +33,10 @@
 import static org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.GET_DOES_NOT_EXIST;
 import static org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.GET_EXISTS;
 import static org.hamcrest.Matchers.is;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.eq;
+import static org.hamcrest.Matchers.instanceOf;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.verify;

 /**
  * Base test helper for any {@link PublishableHttpResource}.
@@ -87,7 +90,9 @@ protected void assertCheckWithException(final PublishableHttpResource resource,
         final ResponseException responseException = responseException("GET", endpoint, failedCheckStatus());
         final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException);

-        when(client.performRequest("GET", endpoint, getParameters(resource.getParameters()))).thenThrow(e);
+        Request request = new Request("GET", endpoint);
+        addParameters(request, getParameters(resource.getParameters()));
+        when(client.performRequest(request)).thenThrow(e);

         assertThat(resource.doCheck(client), is(CheckResponse.ERROR));
     }
@@ -123,7 +128,9 @@ protected void assertCheckAsDeleteWithException(final PublishableHttpResource re
         final ResponseException responseException = responseException("DELETE", endpoint, failedCheckStatus());
         final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException);

-        when(client.performRequest("DELETE", endpoint, deleteParameters(resource.getParameters()))).thenThrow(e);
+        Request request = new Request("DELETE", endpoint);
+        addParameters(request, deleteParameters(resource.getParameters()));
+        when(client.performRequest(request)).thenThrow(e);

         assertThat(resource.doCheck(client), is(CheckResponse.ERROR));
     }
@@ -173,9 +180,15 @@ protected void assertPublishWithException(final PublishableHttpResource resource
         final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
         final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"));

-        when(client.performRequest(eq("PUT"), eq(endpoint), eq(resource.getParameters()), any(bodyType))).thenThrow(e);
+        when(client.performRequest(Mockito.any(Request.class))).thenThrow(e);

         assertThat(resource.doPublish(client), is(false));
+
+        ArgumentCaptor<Request> request = ArgumentCaptor.forClass(Request.class);
+        verify(client).performRequest(request.capture());
+        assertThat(request.getValue().getMethod(), is("PUT"));
+        assertThat(request.getValue().getEndpoint(), is(endpoint));
+        assertThat(request.getValue().getParameters(), is(resource.getParameters()));
+        assertThat(request.getValue().getEntity(), instanceOf(bodyType));
     }

     protected void assertParameters(final PublishableHttpResource resource) {
@@ -244,7 +257,9 @@ protected void doCheckWithStatusCode(final PublishableHttpResource resource, fin
                                          final String endpoint, final CheckResponse expected,
                                          final Response response)
             throws IOException {
-        when(client.performRequest("GET", endpoint, expectedParameters)).thenReturn(response);
+        Request request = new Request("GET", endpoint);
+        addParameters(request, expectedParameters);
+        when(client.performRequest(request)).thenReturn(response);

         assertThat(resource.doCheck(client), is(expected));
     }
@@ -257,9 +272,14 @@ private void doPublishWithStatusCode(final PublishableHttpResource resource, fin
         final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
         final Response response = response("GET", endpoint, status);

-        when(client.performRequest(eq("PUT"), eq(endpoint), eq(resource.getParameters()), any(bodyType))).thenReturn(response);
+        ArgumentCaptor<Request> request = ArgumentCaptor.forClass(Request.class);
+        when(client.performRequest(request.capture())).thenReturn(response);

         assertThat(resource.doPublish(client), is(expected));
+        assertThat(request.getValue().getMethod(), is("PUT"));
+        assertThat(request.getValue().getEndpoint(), is(endpoint));
+        assertThat(request.getValue().getParameters(), is(resource.getParameters()));
+        assertThat(request.getValue().getEntity(), instanceOf(bodyType));
     }

     protected void doCheckAsDeleteWithStatusCode(final PublishableHttpResource resource,
@@ -277,7 +297,9 @@ protected void doCheckAsDeleteWithStatusCode(final PublishableHttpResou
                                                  final String endpoint, final CheckResponse expected,
                                                  final Response response)
             throws IOException {
-        when(client.performRequest("DELETE", endpoint, deleteParameters(resource.getParameters()))).thenReturn(response);
+        Request request = new Request("DELETE", endpoint);
+        addParameters(request, deleteParameters(resource.getParameters()));
+        when(client.performRequest(request)).thenReturn(response);

         assertThat(resource.doCheck(client), is(expected));
     }
@@ -427,4 +449,9 @@ protected HttpEntity entityForClusterAlert(final CheckResponse expected, final i
         return entity;
     }

+    protected void addParameters(Request request, Map<String, String> parameters) {
+        for (Map.Entry<String, String> param : parameters.entrySet()) {
+            request.addParameter(param.getKey(), param.getValue());
+        }
+    }
 }
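With performRequest collapsed to a single Request argument, the test helpers above verify calls by capturing that one argument and asserting on its pieces. A self-contained sketch of the ArgumentCaptor pattern (the Request and Client types are stand-ins, not the Elasticsearch classes):

```java
import org.mockito.ArgumentCaptor;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

public class RequestCaptorSketch {
    // Stand-ins for org.elasticsearch.client.Request and RestClient.
    static class Request {
        final String method;
        final String endpoint;
        Request(String method, String endpoint) {
            this.method = method;
            this.endpoint = endpoint;
        }
    }

    interface Client {
        void performRequest(Request request);
    }

    public static void main(String[] args) {
        Client client = mock(Client.class);
        client.performRequest(new Request("PUT", "/_template/example"));

        // Capture the single Request argument, then assert on its pieces;
        // this replaces matching each argument of the old multi-parameter overloads.
        ArgumentCaptor<Request> captor = ArgumentCaptor.forClass(Request.class);
        verify(client).performRequest(captor.capture());
        if (!captor.getValue().method.equals("PUT") || !captor.getValue().endpoint.equals("/_template/example")) {
            throw new AssertionError("unexpected request");
        }
        System.out.println("captured " + captor.getValue().method + " " + captor.getValue().endpoint);
    }
}
```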
diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java
index 355a5644bddd3..fdf67602af633 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java
@@ -10,6 +10,7 @@
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
 import org.elasticsearch.Version;
+import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.client.RestClient;
@@ -23,6 +24,9 @@
 import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils;
 import org.elasticsearch.xpack.monitoring.exporter.ClusterAlertsUtil;
 import org.elasticsearch.xpack.monitoring.exporter.Exporter;
+import org.hamcrest.Description;
+import org.hamcrest.Matcher;
+import org.hamcrest.TypeSafeMatcher;
 import org.junit.Before;

 import java.io.IOException;
@@ -37,10 +41,9 @@
 import static org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.CheckResponse.DOES_NOT_EXIST;
 import static org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.CheckResponse.EXISTS;
 import static org.hamcrest.Matchers.hasSize;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyMapOf;
-import static org.mockito.Matchers.eq;
-import static org.mockito.Matchers.startsWith;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.startsWith;
+import static org.mockito.Matchers.argThat;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
@@ -101,7 +104,8 @@ public void testInvalidVersionBlocks() throws IOException {
         final HttpEntity entity = new StringEntity("{\"version\":{\"number\":\"unknown\"}}", ContentType.APPLICATION_JSON);
         when(versionResponse.getEntity()).thenReturn(entity);
-        when(client.performRequest(eq("GET"), eq("/"), anyMapOf(String.class, String.class))).thenReturn(versionResponse);
+        when(client.performRequest(argThat(new RequestMatcher(is("GET"), is("/")))))
+                .thenReturn(versionResponse);

         assertTrue(resources.isDirty());
         assertFalse(resources.checkAndPublish(client));
@@ -140,15 +144,15 @@ public void testTemplateCheckBlocksAfterSuccessfulVersion() throws IOException {
             final List<Response> otherResponses = getTemplateResponses(1, successful, unsuccessful);

             // last check fails implies that N - 2 publishes succeeded!
-            when(client.performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class)))
-                .thenReturn(first, otherResponses.toArray(new Response[otherResponses.size()]))
-                .thenThrow(exception);
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_template/")))))
+                    .thenReturn(first, otherResponses.toArray(new Response[otherResponses.size()]))
+                    .thenThrow(exception);
             whenSuccessfulPutTemplates(otherResponses.size() + 1);

             expectedGets += 1 + successful + unsuccessful;
             expectedPuts = (successfulFirst ? 0 : 1) + unsuccessful;
         } else {
-            when(client.performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_template/")))))
                 .thenThrow(exception);
         }

@@ -185,7 +189,7 @@ public void testTemplatePublishBlocksAfterSuccessfulVersion() throws IOException
             whenGetTemplates(successful, unsuccessful + 2);

             // previous publishes must have succeeded
-            when(client.performRequest(eq("PUT"), startsWith("/_template/"), anyMapOf(String.class, String.class), any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_template/")))))
                 .thenReturn(firstSuccess, otherResponses.toArray(new Response[otherResponses.size()]))
                 .thenThrow(exception);

@@ -197,7 +201,7 @@ public void testTemplatePublishBlocksAfterSuccessfulVersion() throws IOException
             // fail the check so that it has to attempt the PUT
             whenGetTemplates(0, 1);

-            when(client.performRequest(eq("PUT"), startsWith("/_template/"), anyMapOf(String.class, String.class), any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_template/")))))
                 .thenThrow(exception);
         }

@@ -238,7 +242,7 @@ public void testPipelineCheckBlocksAfterSuccessfulTemplates() throws IOException
             }

             // last check fails
-            when(client.performRequest(eq("GET"), startsWith("/_ingest/pipeline/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_ingest/pipeline/")))))
                 .thenReturn(first)
                 .thenThrow(exception);
             if (successfulFirst == false) {
@@ -248,7 +252,7 @@ public void testPipelineCheckBlocksAfterSuccessfulTemplates() throws IOException
             expectedGets = EXPECTED_PIPELINES;
             expectedPuts = successfulFirst ? 0 : 1;
         } else {
-            when(client.performRequest(eq("GET"), startsWith("/_ingest/pipeline/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_ingest/pipeline/")))))
                 .thenThrow(exception);
         }

@@ -285,10 +289,7 @@ public void testPipelinePublishBlocksAfterSuccessfulTemplates() throws IOExcepti
             whenGetPipelines(0, 2);

             // previous publishes must have succeeded
-            when(client.performRequest(eq("PUT"),
-                                       startsWith("/_ingest/pipeline/"),
-                                       anyMapOf(String.class, String.class),
-                                       any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_ingest/pipeline/")))))
                 .thenReturn(firstSuccess)
                 .thenThrow(exception);

@@ -300,10 +301,7 @@ public void testPipelinePublishBlocksAfterSuccessfulTemplates() throws IOExcepti
             // fail the check so that it has to attempt the PUT
             whenGetPipelines(0, 1);

-            when(client.performRequest(eq("PUT"),
-                                       startsWith("/_ingest/pipeline/"),
-                                       anyMapOf(String.class, String.class),
-                                       any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_ingest/pipeline/")))))
                 .thenThrow(exception);
         }

@@ -334,7 +332,8 @@ public void testWatcherCheckBlocksAfterSuccessfulPipelines() throws IOException
         whenSuccessfulPutPipelines(unsuccessfulGetPipelines);

         // there's only one check
-        when(client.performRequest(eq("GET"), eq("/_xpack"), anyMapOf(String.class, String.class))).thenThrow(exception);
+        when(client.performRequest(argThat(new RequestMatcher(is("GET"), is("/_xpack")))))
+                .thenThrow(exception);

         assertTrue(resources.isDirty());
         assertFalse(resources.checkAndPublish(client));
@@ -382,7 +381,7 @@ public void testWatchCheckBlocksAfterSuccessfulWatcherCheck() throws IOException
             final List<Response> otherResponses = getWatcherResponses(1, successful, unsuccessful);

             // last check fails implies that N - 2 publishes succeeded!
-            when(client.performRequest(eq("GET"), startsWith("/_xpack/watcher/watch/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(first, otherResponses.toArray(new Response[otherResponses.size()]))
                 .thenThrow(exception);
             whenSuccessfulPutWatches(otherResponses.size() + 1);
@@ -398,7 +397,7 @@ public void testWatchCheckBlocksAfterSuccessfulWatcherCheck() throws IOException
             // there is no form of an unsuccessful delete; only success or error
             final List<Response> responses = successfulDeleteResponses(successful);

-            when(client.performRequest(eq("DELETE"), startsWith("/_xpack/watcher/watch/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("DELETE"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(responses.get(0), responses.subList(1, successful).toArray(new Response[successful - 1]))
                 .thenThrow(exception);

@@ -407,7 +406,7 @@ public void testWatchCheckBlocksAfterSuccessfulWatcherCheck() throws IOException
         } else {
             final String method = validLicense ? "GET" : "DELETE";

-            when(client.performRequest(eq(method), startsWith("/_xpack/watcher/watch/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is(method), startsWith("/_xpack/watcher/watch/")))))
                 .thenThrow(exception);
         }

@@ -463,10 +462,7 @@ public void testWatchPublishBlocksAfterSuccessfulWatcherCheck() throws IOExcepti
             whenGetWatches(successful, unsuccessful + 2);

             // previous publishes must have succeeded
-            when(client.performRequest(eq("PUT"),
-                                       startsWith("/_xpack/watcher/watch/"),
-                                       anyMapOf(String.class, String.class),
-                                       any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(firstSuccess, otherResponses.toArray(new Response[otherResponses.size()]))
                 .thenThrow(exception);

@@ -478,10 +474,7 @@ public void testWatchPublishBlocksAfterSuccessfulWatcherCheck() throws IOExcepti
             // fail the check so that it has to attempt the PUT
             whenGetWatches(0, 1);

-            when(client.performRequest(eq("PUT"),
-                                       startsWith("/_xpack/watcher/watch/"),
-                                       anyMapOf(String.class, String.class),
-                                       any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_xpack/watcher/watch/")))))
                 .thenThrow(exception);
         }

@@ -715,17 +708,18 @@ private void whenValidVersionResponse() throws IOException {
         final HttpEntity entity = new StringEntity("{\"version\":{\"number\":\"" + Version.CURRENT + "\"}}", ContentType.APPLICATION_JSON);
         when(versionResponse.getEntity()).thenReturn(entity);
-        when(client.performRequest(eq("GET"), eq("/"), anyMapOf(String.class, String.class))).thenReturn(versionResponse);
+        when(client.performRequest(argThat(new RequestMatcher(is("GET"), is("/")))))
+                .thenReturn(versionResponse);
     }

     private void whenGetTemplates(final int successful, final int unsuccessful) throws IOException {
         final List<Response> gets = getTemplateResponses(0, successful, unsuccessful);

         if (gets.size() == 1) {
-            when(client.performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_template/")))))
                 .thenReturn(gets.get(0));
         } else {
-            when(client.performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_template/")))))
                 .thenReturn(gets.get(0), gets.subList(1, gets.size()).toArray(new Response[gets.size() - 1]));
         }
     }
@@ -735,10 +729,10 @@ private void whenSuccessfulPutTemplates(final int successful) throws IOException

         // empty is possible if they all exist
         if (successful == 1) {
-            when(client.performRequest(eq("PUT"), startsWith("/_template/"), anyMapOf(String.class, String.class), any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_template/")))))
                 .thenReturn(successfulPuts.get(0));
         } else if (successful > 1) {
-            when(client.performRequest(eq("PUT"), startsWith("/_template/"), anyMapOf(String.class, String.class), any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_template/")))))
                 .thenReturn(successfulPuts.get(0), successfulPuts.subList(1, successful).toArray(new Response[successful - 1]));
         }
     }
@@ -747,10 +741,10 @@ private void whenGetPipelines(final int successful, final int unsuccessful) thro
         final List<Response> gets = getPipelineResponses(0, successful, unsuccessful);

         if (gets.size() == 1) {
-            when(client.performRequest(eq("GET"), startsWith("/_ingest/pipeline/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_ingest/pipeline/")))))
                 .thenReturn(gets.get(0));
         } else {
-            when(client.performRequest(eq("GET"), startsWith("/_ingest/pipeline/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_ingest/pipeline/")))))
                 .thenReturn(gets.get(0), gets.subList(1, gets.size()).toArray(new Response[gets.size() - 1]));
         }
     }
@@ -760,16 +754,10 @@ private void whenSuccessfulPutPipelines(final int successful) throws IOException

         // empty is possible if they all exist
         if (successful == 1) {
-            when(client.performRequest(eq("PUT"),
-                                       startsWith("/_ingest/pipeline/"),
-                                       anyMapOf(String.class, String.class),
-                                       any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_ingest/pipeline/")))))
                 .thenReturn(successfulPuts.get(0));
         } else if (successful > 1) {
-            when(client.performRequest(eq("PUT"),
-                                       startsWith("/_ingest/pipeline/"),
-                                       anyMapOf(String.class, String.class),
-                                       any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_ingest/pipeline/")))))
                 .thenReturn(successfulPuts.get(0), successfulPuts.subList(1, successful).toArray(new Response[successful - 1]));
         }
     }
@@ -787,7 +775,8 @@ private void whenWatcherCanBeUsed(final boolean validLicense) throws IOException
         final Response successfulGet = response("GET", "_xpack", successfulCheckStatus(), entity);

         // empty is possible if they all exist
-        when(client.performRequest(eq("GET"), eq("/_xpack"), anyMapOf(String.class, String.class))).thenReturn(successfulGet);
+        when(client.performRequest(argThat(new RequestMatcher(is("GET"), is("/_xpack")))))
+                .thenReturn(successfulGet);
     }

     private void whenWatcherCannotBeUsed() throws IOException {
@@ -805,17 +794,18 @@ private void whenWatcherCannotBeUsed() throws IOException {
         }

         // empty is possible if they all exist
-        when(client.performRequest(eq("GET"), eq("/_xpack"), anyMapOf(String.class, String.class))).thenReturn(response);
+        when(client.performRequest(argThat(new RequestMatcher(is("GET"), is("/_xpack")))))
+                .thenReturn(response);
     }

     private void whenGetWatches(final int successful, final int unsuccessful) throws IOException {
         final List<Response> gets = getWatcherResponses(0, successful, unsuccessful);

         if (gets.size() == 1) {
-            when(client.performRequest(eq("GET"), startsWith("/_xpack/watcher/watch/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(gets.get(0));
         } else {
-            when(client.performRequest(eq("GET"), startsWith("/_xpack/watcher/watch/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(gets.get(0), gets.subList(1, gets.size()).toArray(new Response[gets.size() - 1]));
         }
     }
@@ -825,16 +815,10 @@ private void whenSuccessfulPutWatches(final int successful) throws IOException {

         // empty is possible if they all exist
         if (successful == 1) {
-            when(client.performRequest(eq("PUT"),
-                                       startsWith("/_xpack/watcher/watch/"),
-                                       anyMapOf(String.class, String.class),
-                                       any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(successfulPuts.get(0));
         } else if (successful > 1) {
-            when(client.performRequest(eq("PUT"),
-                                       startsWith("/_xpack/watcher/watch/"),
-                                       anyMapOf(String.class, String.class),
-                                       any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(successfulPuts.get(0), successfulPuts.subList(1, successful).toArray(new Response[successful - 1]));
         }
     }
@@ -844,64 +828,55 @@ private void whenSuccessfulDeleteWatches(final int successful) throws IOExceptio

         // empty is possible if they all exist
         if (successful == 1) {
-            when(client.performRequest(eq("DELETE"),
-                                       startsWith("/_xpack/watcher/watch/"),
-                                       anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("DELETE"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(successfulDeletes.get(0));
         } else if (successful > 1) {
-            when(client.performRequest(eq("DELETE"),
-                                       startsWith("/_xpack/watcher/watch/"),
-                                       anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("DELETE"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(successfulDeletes.get(0), successfulDeletes.subList(1, successful).toArray(new Response[successful - 1]));
         }
     }

+
     private void verifyVersionCheck() throws IOException {
-        verify(client).performRequest(eq("GET"), eq("/"), anyMapOf(String.class, String.class));
+        verify(client).performRequest(argThat(new RequestMatcher(is("GET"), is("/"))));
     }

     private void verifyGetTemplates(final int called) throws IOException {
-        verify(client, times(called)).performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class));
+        verify(client, times(called))
+                .performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_template/"))));
     }

     private void verifyPutTemplates(final int called) throws IOException {
-        verify(client, times(called)).performRequest(eq("PUT"),                      // method
-                                                     startsWith("/_template/"),      // endpoint
-                                                     anyMapOf(String.class, String.class), // parameters (e.g., timeout)
-                                                     any(HttpEntity.class));         // raw template
+        verify(client, times(called))
+                .performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_template/"))));
     }

     private void verifyGetPipelines(final int called) throws IOException {
-        verify(client, times(called)).performRequest(eq("GET"), startsWith("/_ingest/pipeline/"), anyMapOf(String.class, String.class));
+        verify(client, times(called))
+                .performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_ingest/pipeline/"))));
     }

     private void verifyPutPipelines(final int called) throws IOException {
-        verify(client, times(called)).performRequest(eq("PUT"),                      // method
-                                                     startsWith("/_ingest/pipeline/"), // endpoint
-                                                     anyMapOf(String.class, String.class), // parameters (e.g., timeout)
-                                                     any(HttpEntity.class));         // raw template
+        verify(client, times(called))
+                .performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_ingest/pipeline/"))));
     }

     private void verifyWatcherCheck() throws IOException {
-        verify(client).performRequest(eq("GET"), eq("/_xpack"), anyMapOf(String.class, String.class));
+        verify(client).performRequest(argThat(new RequestMatcher(is("GET"), is("/_xpack"))));
     }

     private void verifyDeleteWatches(final int called) throws IOException {
-        verify(client, times(called)).performRequest(eq("DELETE"),                         // method
-                                                     startsWith("/_xpack/watcher/watch/"), // endpoint
-                                                     anyMapOf(String.class, String.class));// parameters (e.g., timeout)
+        verify(client, times(called))
+                .performRequest(argThat(new RequestMatcher(is("DELETE"), startsWith("/_xpack/watcher/watch/"))));
     }

     private void verifyGetWatches(final int called) throws IOException {
-        verify(client, times(called)).performRequest(eq("GET"),
-                                                     startsWith("/_xpack/watcher/watch/"),
-                                                     anyMapOf(String.class, String.class));
+        verify(client, times(called))
+                .performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_xpack/watcher/watch/"))));
     }

     private void verifyPutWatches(final int called) throws IOException {
-        verify(client, times(called)).performRequest(eq("PUT"),                            // method
-                                                     startsWith("/_xpack/watcher/watch/"), // endpoint
-                                                     anyMapOf(String.class, String.class), // parameters (e.g., timeout)
-                                                     any(HttpEntity.class));               // raw template
+        verify(client, times(called))
+                .performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_xpack/watcher/watch/"))));
     }

     private ClusterService mockClusterService(final ClusterState state) {
@@ -922,4 +897,24 @@ private ClusterState mockClusterState(final boolean electedMaster) {
         return state;
     }

+    private static class RequestMatcher extends TypeSafeMatcher<Request> {
+        private final Matcher<String> method;
+        private final Matcher<String> endpoint;
+
+        private RequestMatcher(Matcher<String> method, Matcher<String> endpoint) {
+            this.method = method;
+            this.endpoint = endpoint;
+        }
+
+        @Override
+        protected boolean matchesSafely(Request item) {
+            return method.matches(item.getMethod()) && endpoint.matches(item.getEndpoint());
+        }
+
+        @Override
+        public void describeTo(Description description) {
+            description.appendText("method is ").appendDescriptionOf(method);
+            description.appendText(" and endpoint is ").appendDescriptionOf(endpoint);
+        }
+    }
 }
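The RequestMatcher above composes two string matchers into one matcher over the whole request, which is what lets a single argThat(...) replace the old eq()/startsWith()/anyMapOf() argument lists. A stand-alone sketch of the same TypeSafeMatcher composition (the Request type here is a stand-in for the client class):

```java
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;

import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.startsWith;

public class RequestMatcherSketch {
    // Stand-in for org.elasticsearch.client.Request.
    static class Request {
        final String method;
        final String endpoint;
        Request(String method, String endpoint) {
            this.method = method;
            this.endpoint = endpoint;
        }
    }

    // Two string matchers folded into one matcher over the whole request;
    // an instance like this is what gets passed to Mockito's argThat above.
    static class RequestMatcher extends TypeSafeMatcher<Request> {
        private final Matcher<String> method;
        private final Matcher<String> endpoint;

        RequestMatcher(Matcher<String> method, Matcher<String> endpoint) {
            this.method = method;
            this.endpoint = endpoint;
        }

        @Override
        protected boolean matchesSafely(Request item) {
            return method.matches(item.method) && endpoint.matches(item.endpoint);
        }

        @Override
        public void describeTo(Description description) {
            description.appendText("method is ").appendDescriptionOf(method)
                       .appendText(" and endpoint is ").appendDescriptionOf(endpoint);
        }
    }

    public static void main(String[] args) {
        RequestMatcher matcher = new RequestMatcher(is("GET"), startsWith("/_template/"));
        System.out.println(matcher.matches(new Request("GET", "/_template/example"))); // true
        System.out.println(matcher.matches(new Request("PUT", "/_template/example"))); // false
    }
}
```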
diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java
index 9c217612263d7..e32effc85f94b 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/PublishableHttpResourceTests.java
@@ -10,6 +10,7 @@
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
 import org.apache.logging.log4j.Logger;
+import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.client.RestClient;
@@ -61,13 +62,15 @@ public void testCheckForResourceUnexpectedResponse() throws IOException {
         final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
         final RestStatus failedStatus = failedCheckStatus();
         final Response response = response("GET", endpoint, failedStatus);
+        final Request request = new Request("GET", endpoint);
+        addParameters(request, getParameters(resource.getParameters()));

-        when(client.performRequest("GET", endpoint, getParameters(resource.getParameters()))).thenReturn(response);
+        when(client.performRequest(request)).thenReturn(response);

         sometimesAssertSimpleCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, CheckResponse.ERROR, response);

         verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("GET", endpoint, getParameters(resource.getParameters()));
+        verify(client).performRequest(request);
         verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), any(ResponseException.class));

         verifyNoMoreInteractions(client, logger);
@@ -95,8 +98,10 @@ public void testVersionCheckForResourceUnexpectedResponse() throws IOException {
         final Response response = response("GET", endpoint, failedStatus);
         final XContent xContent = mock(XContent.class);
         final int minimumVersion = randomInt();
+        final Request request = new Request("GET", endpoint);
+        addParameters(request, getParameters(resource.getParameters()));

-        when(client.performRequest("GET", endpoint, getParameters(resource.getParameters()))).thenReturn(response);
+        when(client.performRequest(request)).thenReturn(response);

         assertThat(resource.versionCheckForResource(client, logger,
                                                     resourceBasePath, resourceName, resourceType, owner, ownerType,
@@ -104,7 +109,7 @@ public void testVersionCheckForResourceUnexpectedResponse() throws IOException {
                    is(CheckResponse.ERROR));

         verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("GET", endpoint, getParameters(resource.getParameters()));
+        verify(client).performRequest(request);
         verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), any(ResponseException.class));

         verifyNoMoreInteractions(client, logger);
@@ -117,8 +122,10 @@ public void testVersionCheckForResourceMalformedResponse() throws IOException {
         final HttpEntity entity = entityForResource(CheckResponse.ERROR, resourceName, minimumVersion);
         final Response response = response("GET", endpoint, okStatus, entity);
         final XContent xContent = mock(XContent.class);
+        final Request request = new Request("GET", endpoint);
+        addParameters(request, getParameters(resource.getParameters()));

-        when(client.performRequest("GET", endpoint, getParameters(resource.getParameters()))).thenReturn(response);
+        when(client.performRequest(request)).thenReturn(response);

         assertThat(resource.versionCheckForResource(client, logger,
                                                     resourceBasePath, resourceName, resourceType, owner, ownerType,
@@ -127,7 +134,7 @@ public void testVersionCheckForResourceMalformedResponse() throws IOException {

         verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
         verify(logger).debug("{} [{}] found on the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("GET", endpoint, getParameters(resource.getParameters()));
+        verify(client).performRequest(request);
         verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), any(ResponseException.class));

         verifyNoMoreInteractions(client, logger);
@@ -140,12 +147,14 @@ public void testCheckForResourceErrors() throws IOException {
         final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException);
         final Response response = e == responseException ? responseException.getResponse() : null;

-        when(client.performRequest("GET", endpoint, getParameters(resource.getParameters()))).thenThrow(e);
+        Request request = new Request("GET", endpoint);
+        addParameters(request, getParameters(resource.getParameters()));
+        when(client.performRequest(request)).thenThrow(e);

         sometimesAssertSimpleCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, CheckResponse.ERROR, response);

         verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("GET", endpoint, getParameters(resource.getParameters()));
+        verify(client).performRequest(request);
         verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), eq(e));

         verifyNoMoreInteractions(client, logger);
@@ -162,13 +171,16 @@ public void testPutResourceFalse() throws IOException {

     public void testPutResourceFalseWithException() throws IOException {
         final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
         final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"));
+        final Request request = new Request("PUT", endpoint);
+        addParameters(request, resource.getParameters());
+        request.setEntity(entity);

-        when(client.performRequest("PUT", endpoint, resource.getParameters(), entity)).thenThrow(e);
+        when(client.performRequest(request)).thenThrow(e);

         assertThat(resource.putResource(client, logger, resourceBasePath, resourceName, body, resourceType, owner, ownerType), is(false));

         verify(logger).trace("uploading {} [{}] to the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("PUT", endpoint, resource.getParameters(), entity);
+        verify(client).performRequest(request);
         verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), eq(e));

         verifyNoMoreInteractions(client, logger);
@@ -190,13 +202,15 @@ public void testDeleteResourceErrors() throws IOException {
         final ResponseException responseException = responseException("DELETE", endpoint, failedStatus);
         final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException);
         final Map<String, String> deleteParameters = deleteParameters(resource.getParameters());
+        final Request request = new Request("DELETE", endpoint);
+        addParameters(request, deleteParameters);

-        when(client.performRequest("DELETE", endpoint, deleteParameters)).thenThrow(e);
+        when(client.performRequest(request)).thenThrow(e);

         assertThat(resource.deleteResource(client, logger, resourceBasePath, resourceName, resourceType, owner, ownerType), is(false));

         verify(logger).trace("deleting {} [{}] from the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("DELETE", endpoint, deleteParameters);
+        verify(client).performRequest(request);
         verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), eq(e));

         verifyNoMoreInteractions(client, logger);
@@ -277,13 +291,15 @@ private void assertCheckForResource(final RestStatus status, final CheckResponse
             throws IOException {
         final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
         final Response response = response("GET", endpoint, status);
+        final Request request = new Request("GET", endpoint);
+        addParameters(request, getParameters(resource.getParameters()));

-        when(client.performRequest("GET", endpoint, getParameters(resource.getParameters()))).thenReturn(response);
+        when(client.performRequest(request)).thenReturn(response);

         sometimesAssertSimpleCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, expected, response);

         verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("GET", endpoint, getParameters(resource.getParameters()));
+        verify(client).performRequest(request);

         if (expected == CheckResponse.EXISTS || expected == CheckResponse.DOES_NOT_EXIST) {
             verify(response).getStatusLine();
@@ -310,8 +326,10 @@ private void assertVersionCheckForResource(final RestStatus status, final CheckR
         final HttpEntity entity = status == RestStatus.OK ? entityForResource(expected, resourceName, minimumVersion) : null;
         final Response response = response("GET", endpoint, status, entity);
         final XContent xContent = XContentType.JSON.xContent();
+        final Request request = new Request("GET", endpoint);
+        addParameters(request, getParameters(resource.getParameters()));

-        when(client.performRequest("GET", endpoint, getParameters(resource.getParameters()))).thenReturn(response);
+        when(client.performRequest(request)).thenReturn(response);

         assertThat(resource.versionCheckForResource(client, logger,
                                                     resourceBasePath, resourceName, resourceType, owner, ownerType,
@@ -319,7 +337,7 @@ private void assertVersionCheckForResource(final RestStatus status, final CheckR
                    is(expected));

         verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("GET", endpoint, getParameters(resource.getParameters()));
+        verify(client).performRequest(request);

         if (shouldReplace || expected == CheckResponse.EXISTS) {
             verify(response).getStatusLine();
@@ -341,13 +359,16 @@ private void assertVersionCheckForResource(final RestStatus status, final CheckR
     private void assertPutResource(final RestStatus status, final boolean expected) throws IOException {
         final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
         final Response response = response("PUT", endpoint, status);
+        final Request request = new Request("PUT", endpoint);
+        addParameters(request, resource.getParameters());
+        request.setEntity(entity);

-        when(client.performRequest("PUT", endpoint, resource.getParameters(), entity)).thenReturn(response);
+        when(client.performRequest(request)).thenReturn(response);

         assertThat(resource.putResource(client, logger, resourceBasePath, resourceName, body, resourceType, owner, ownerType),
                    is(expected));

-        verify(client).performRequest("PUT", endpoint, resource.getParameters(), entity);
+        verify(client).performRequest(request);
         verify(response).getStatusLine();

         verify(logger).trace("uploading {} [{}] to the [{}] {}", resourceType, resourceName, owner, ownerType);
@@ -388,12 +409,14 @@ private void assertDeleteResource(final RestStatus status, final boolean expecte
         final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
         final Response response = response("DELETE", endpoint, status);
         final Map<String, String> deleteParameters = deleteParameters(resource.getParameters());
+        final Request request = new Request("DELETE", endpoint);
+        addParameters(request, deleteParameters);

-        when(client.performRequest("DELETE", endpoint, deleteParameters)).thenReturn(response);
+        when(client.performRequest(request)).thenReturn(response);

         assertThat(resource.deleteResource(client, logger, resourceBasePath, resourceName, resourceType, owner, ownerType),
                    is(expected));

-        verify(client).performRequest("DELETE", endpoint, deleteParameters);
+        verify(client).performRequest(request);
         verify(response).getStatusLine();

         verify(logger).trace("deleting {} [{}] from the [{}] {}", resourceType, resourceName, owner, ownerType);
diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/VersionHttpResourceTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/VersionHttpResourceTests.java
index d6005b570467b..27de4b28cee5e 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/VersionHttpResourceTests.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/VersionHttpResourceTests.java
@@ -6,8 +6,9 @@
 package org.elasticsearch.xpack.monitoring.exporter.http;

 import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
+import org.apache.http.nio.entity.NStringEntity;
 import org.elasticsearch.Version;
+import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.test.ESTestCase;
@@ -73,8 +74,9 @@ public void testDoCheckAndPublishFailedFieldWrongType() throws IOException {
     }

     public void testDoCheckAndPublishFailedWithIOException() throws IOException {
-        // request fails for some reason
-        when(client.performRequest("GET", "/", VersionHttpResource.PARAMETERS)).thenThrow(new IOException("expected"));
+        Request request = new Request("GET", "/");
+        request.addParameter("filter_path", "version.number");
+        when(client.performRequest(request)).thenThrow(new IOException("expected"));

         final VersionHttpResource resource = new VersionHttpResource(owner, Version.CURRENT);

@@ -82,12 +84,14 @@ public void testDoCheckAndPublishFailedWithIOException() throws IOException {
     }

     private Response responseForJSON(final String json) throws IOException {
-        final StringEntity entity = new StringEntity(json, ContentType.APPLICATION_JSON);
+        final NStringEntity entity = new NStringEntity(json, ContentType.APPLICATION_JSON);

         final Response response = mock(Response.class);
         when(response.getEntity()).thenReturn(entity);

-        when(client.performRequest("GET", "/", VersionHttpResource.PARAMETERS)).thenReturn(response);
+        Request request = new Request("GET", "/");
+        request.addParameter("filter_path", "version.number");
+        when(client.performRequest(request)).thenReturn(response);

         return response;
     }
diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java
index a77f6bf24e99d..efc32fccb3dda 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java
@@ -35,11 +35,11 @@
 import org.elasticsearch.xpack.core.XPackSettings;
 import org.elasticsearch.xpack.core.action.XPackUsageRequestBuilder;
 import org.elasticsearch.xpack.core.action.XPackUsageResponse;
+import org.elasticsearch.xpack.core.monitoring.MonitoredSystem;
+import org.elasticsearch.xpack.core.monitoring.MonitoringFeatureSetUsage;
 import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkRequestBuilder;
 import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkResponse;
 import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils;
-import org.elasticsearch.xpack.core.monitoring.MonitoredSystem;
-import org.elasticsearch.xpack.core.monitoring.MonitoringFeatureSetUsage;
 import org.elasticsearch.xpack.monitoring.LocalStateMonitoring;
 import org.elasticsearch.xpack.monitoring.MonitoringService;
 import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterStatsMonitoringDoc;
@@ -112,7 +112,6 @@ private String createBulkEntity() {
      * This test uses the Monitoring Bulk API to index document as an external application like Kibana would do. It
      * then ensure that the documents were correctly indexed and have the expected information.
      */
-    @SuppressWarnings("unchecked")
     public void testMonitoringBulk() throws Exception {
         whenExportersAreReady(() -> {
             final MonitoredSystem system = randomSystem();
@@ -188,7 +187,6 @@ public void testMonitoringBulk() throws Exception {
      * have been indexed with the expected information.
      */
     @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29880")
-    @SuppressWarnings("unchecked")
     public void testMonitoringService() throws Exception {
         final boolean createAPMIndex = randomBoolean();
         final String indexName = createAPMIndex ? "apm-2017.11.06" : "books";
@@ -284,7 +282,6 @@ private void assertMonitoringDoc(final Map<String, Object> document,
      * Asserts that the source_node information (provided as a Map) of a monitoring document correspond to
      * the current local node information
      */
-    @SuppressWarnings("unchecked")
     private void assertMonitoringDocSourceNode(final Map<String, Object> sourceNode) {
         assertEquals(6, sourceNode.size());
@@ -541,7 +538,6 @@ public void enableMonitoring() throws Exception {
     /**
      * Disable the monitoring service and the Local exporter.
      */
-    @SuppressWarnings("unchecked")
     public void disableMonitoring() throws Exception {
         final Settings settings = Settings.builder()
                 .putNull("xpack.monitoring.collection.enabled")
diff --git a/x-pack/plugin/rollup/build.gradle b/x-pack/plugin/rollup/build.gradle
index ff9c30ed9a934..649a89bc2cdee 100644
--- a/x-pack/plugin/rollup/build.gradle
+++ b/x-pack/plugin/rollup/build.gradle
@@ -16,7 +16,7 @@ compileTestJava.options.compilerArgs << "-Xlint:-rawtypes"

 dependencies {
   compileOnly "org.elasticsearch:elasticsearch:${version}"
-  compileOnly "org.elasticsearch.plugin:x-pack-core:${version}"
+  compileOnly project(path: xpackModule('core'), configuration: 'shadow')
   testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
 }

diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java
index dc2fac776c6c0..538babf4fbced 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java
@@ -208,7 +208,7 @@ public static List<AggregationBuilder> translateAggregation(AggregationBuilder s
     private static List<AggregationBuilder> translateDateHistogram(DateHistogramAggregationBuilder source,
                                                                    List<QueryBuilder> filterConditions,
                                                                    NamedWriteableRegistry registry) {
-        
+
         return translateVSAggBuilder(source, filterConditions, registry, () -> {
             DateHistogramAggregationBuilder rolledDateHisto
                     = new DateHistogramAggregationBuilder(source.getName());
dependencies { - compileOnly "org.elasticsearch.plugin:x-pack-core:${version}" + compileOnly project(path: xpackModule('core'), configuration: 'shadow') compileOnly project(path: ':modules:transport-netty4', configuration: 'runtime') compileOnly project(path: ':plugins:transport-nio', configuration: 'runtime') @@ -52,12 +52,73 @@ dependencies { compile "org.apache.httpcomponents:httpasyncclient:${versions.httpasyncclient}" compile "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}" compile "org.apache.httpcomponents:httpclient-cache:${versions.httpclient}" - compile 'com.google.guava:guava:19.0' + compile 'com.google.guava:guava:19.0' testCompile 'org.elasticsearch:securemock:1.2' testCompile "org.elasticsearch:mocksocket:${versions.mocksocket}" //testCompile "org.yaml:snakeyaml:${versions.snakeyaml}" + // Test dependencies for Kerberos (MiniKdc) + testCompile('commons-io:commons-io:2.5') + testCompile('org.apache.kerby:kerb-simplekdc:1.1.1') + testCompile('org.apache.kerby:kerb-client:1.1.1') + testCompile('org.apache.kerby:kerby-config:1.1.1') + testCompile('org.apache.kerby:kerb-core:1.1.1') + testCompile('org.apache.kerby:kerby-pkix:1.1.1') + testCompile('org.apache.kerby:kerby-asn1:1.1.1') + testCompile('org.apache.kerby:kerby-util:1.1.1') + testCompile('org.apache.kerby:kerb-common:1.1.1') + testCompile('org.apache.kerby:kerb-crypto:1.1.1') + testCompile('org.apache.kerby:kerb-util:1.1.1') + testCompile('org.apache.kerby:token-provider:1.1.1') + testCompile('com.nimbusds:nimbus-jose-jwt:4.41.2') + testCompile('net.jcip:jcip-annotations:1.0') + testCompile('org.apache.kerby:kerb-admin:1.1.1') + testCompile('org.apache.kerby:kerb-server:1.1.1') + testCompile('org.apache.kerby:kerb-identity:1.1.1') + testCompile('org.apache.kerby:kerby-xdr:1.1.1') + + // LDAP backend support for SimpleKdcServer + testCompile('org.apache.kerby:kerby-backend:1.1.1') + testCompile('org.apache.kerby:ldap-backend:1.1.1') + testCompile('org.apache.kerby:kerb-identity:1.1.1') + testCompile('org.apache.directory.api:api-ldap-client-api:1.0.0') + testCompile('org.apache.directory.api:api-ldap-schema-data:1.0.0') + testCompile('org.apache.directory.api:api-ldap-codec-core:1.0.0') + testCompile('org.apache.directory.api:api-ldap-extras-aci:1.0.0') + testCompile('org.apache.directory.api:api-ldap-extras-codec:1.0.0') + testCompile('org.apache.directory.api:api-ldap-extras-codec-api:1.0.0') + testCompile('commons-pool:commons-pool:1.6') + testCompile('commons-collections:commons-collections:3.2') + testCompile('org.apache.mina:mina-core:2.0.17') + testCompile('org.apache.directory.api:api-util:1.0.1') + testCompile('org.apache.directory.api:api-i18n:1.0.1') + testCompile('org.apache.directory.api:api-ldap-model:1.0.1') + testCompile('org.apache.directory.api:api-asn1-api:1.0.1') + testCompile('org.apache.directory.api:api-asn1-ber:1.0.1') + testCompile('org.apache.servicemix.bundles:org.apache.servicemix.bundles.antlr:2.7.7_5') + testCompile('org.apache.directory.server:apacheds-core-api:2.0.0-M24') + testCompile('org.apache.directory.server:apacheds-i18n:2.0.0-M24') + testCompile('org.apache.directory.api:api-ldap-extras-util:1.0.0') + testCompile('net.sf.ehcache:ehcache:2.10.4') + testCompile('org.apache.directory.server:apacheds-kerberos-codec:2.0.0-M24') + testCompile('org.apache.directory.server:apacheds-protocol-ldap:2.0.0-M24') + testCompile('org.apache.directory.server:apacheds-protocol-shared:2.0.0-M24') + testCompile('org.apache.directory.jdbm:apacheds-jdbm1:2.0.0-M3') + 
testCompile('org.apache.directory.server:apacheds-jdbm-partition:2.0.0-M24') + testCompile('org.apache.directory.server:apacheds-xdbm-partition:2.0.0-M24') + testCompile('org.apache.directory.api:api-ldap-extras-sp:1.0.0') + testCompile('org.apache.directory.server:apacheds-test-framework:2.0.0-M24') + testCompile('org.apache.directory.server:apacheds-core-annotations:2.0.0-M24') + testCompile('org.apache.directory.server:apacheds-ldif-partition:2.0.0-M24') + testCompile('org.apache.directory.server:apacheds-mavibot-partition:2.0.0-M24') + testCompile('org.apache.directory.server:apacheds-protocol-kerberos:2.0.0-M24') + testCompile('org.apache.directory.server:apacheds-server-annotations:2.0.0-M24') + testCompile('org.apache.directory.api:api-ldap-codec-standalone:1.0.0') + testCompile('org.apache.directory.api:api-ldap-net-mina:1.0.0') + testCompile('org.apache.directory.server:ldap-client-test:2.0.0-M24') + testCompile('org.apache.directory.server:apacheds-interceptor-kerberos:2.0.0-M24') + testCompile('org.apache.directory.mavibot:mavibot:1.0.0-M8') } compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked" diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle index 1799a2c7b81e1..19b86dd91c78b 100644 --- a/x-pack/plugin/security/cli/build.gradle +++ b/x-pack/plugin/security/cli/build.gradle @@ -4,7 +4,7 @@ archivesBaseName = 'elasticsearch-security-cli' dependencies { compileOnly "org.elasticsearch:elasticsearch:${version}" - compileOnly xpackProject('plugin:core') + compileOnly project(path: xpackModule('core'), configuration: 'shadow') compile 'org.bouncycastle:bcprov-jdk15on:1.59' compile 'org.bouncycastle:bcpkix-jdk15on:1.59' testImplementation 'com.google.jimfs:jimfs:1.1' @@ -17,4 +17,4 @@ dependencies { dependencyLicenses { mapping from: /bc.*/, to: 'bouncycastle' -} \ No newline at end of file +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140JKSKeystoreBootstrapCheck.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140JKSKeystoreBootstrapCheck.java new file mode 100644 index 0000000000000..4a2c7b97195eb --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140JKSKeystoreBootstrapCheck.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security; + +import org.elasticsearch.bootstrap.BootstrapCheck; +import org.elasticsearch.bootstrap.BootstrapContext; +import org.elasticsearch.common.settings.Settings; + + +public class FIPS140JKSKeystoreBootstrapCheck implements BootstrapCheck { + + private final boolean fipsModeEnabled; + + FIPS140JKSKeystoreBootstrapCheck(Settings settings) { + this.fipsModeEnabled = Security.FIPS_MODE_ENABLED.get(settings); + } + + /** + * Test if the node fails the check. 
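The check implemented just below fails the node whenever any `*.keystore.type` setting is `jks`, or whenever a `*.keystore.path` is configured without an explicit type, since JKS is the default keystore type. A minimal sketch of settings that would pass and fail it; the SSL setting names are illustrative, not taken from this PR:

```java
import org.elasticsearch.common.settings.Settings;

public class FipsJksSettingsSketch {
    public static void main(String[] args) {
        // Explicit JKS type: rejected when FIPS mode is enabled.
        Settings explicitJks = Settings.builder()
                .put("xpack.security.transport.ssl.keystore.type", "jks")
                .build();

        // Keystore path with no ".type" setting: JKS is assumed, so also rejected.
        Settings implicitJks = Settings.builder()
                .put("xpack.security.transport.ssl.keystore.path", "certs.jks")
                .build();

        // Non-JKS keystore type: passes the check.
        Settings pkcs12 = Settings.builder()
                .put("xpack.security.transport.ssl.keystore.path", "certs.p12")
                .put("xpack.security.transport.ssl.keystore.type", "PKCS12")
                .build();
    }
}
```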
+ * + * @param context the bootstrap context + * @return the result of the bootstrap check + */ + @Override + public BootstrapCheckResult check(BootstrapContext context) { + + if (fipsModeEnabled) { + final Settings settings = context.settings; + Settings keystoreTypeSettings = settings.filter(k -> k.endsWith("keystore.type")) + .filter(k -> settings.get(k).equalsIgnoreCase("jks")); + if (keystoreTypeSettings.isEmpty() == false) { + return BootstrapCheckResult.failure("JKS Keystores cannot be used in a FIPS 140 compliant JVM. Please " + + "revisit [" + keystoreTypeSettings.toDelimitedString(',') + "] settings"); + } + // Default Keystore type is JKS if not explicitly set + Settings keystorePathSettings = settings.filter(k -> k.endsWith("keystore.path")) + .filter(k -> settings.hasValue(k.replace(".path", ".type")) == false); + if (keystorePathSettings.isEmpty() == false) { + return BootstrapCheckResult.failure("JKS Keystores cannot be used in a FIPS 140 compliant JVM. Please " + + "revisit [" + keystorePathSettings.toDelimitedString(',') + "] settings"); + } + + } + return BootstrapCheckResult.success(); + } + + @Override + public boolean alwaysEnforce() { + return fipsModeEnabled; + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140PasswordHashingAlgorithmBootstrapCheck.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140PasswordHashingAlgorithmBootstrapCheck.java new file mode 100644 index 0000000000000..7f6d799cf5a8e --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140PasswordHashingAlgorithmBootstrapCheck.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security; + +import org.elasticsearch.bootstrap.BootstrapCheck; +import org.elasticsearch.bootstrap.BootstrapContext; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.core.XPackSettings; + +import java.util.Locale; + +public class FIPS140PasswordHashingAlgorithmBootstrapCheck implements BootstrapCheck { + + private final boolean fipsModeEnabled; + + FIPS140PasswordHashingAlgorithmBootstrapCheck(Settings settings) { + this.fipsModeEnabled = Security.FIPS_MODE_ENABLED.get(settings); + } + + /** + * Test if the node fails the check. + * + * @param context the bootstrap context + * @return the result of the bootstrap check + */ + @Override + public BootstrapCheckResult check(BootstrapContext context) { + final String selectedAlgorithm = XPackSettings.PASSWORD_HASHING_ALGORITHM.get(context.settings); + if (selectedAlgorithm.toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) { + return BootstrapCheckResult.failure("Only PBKDF2 is allowed for password hashing in a FIPS-140 JVM. 
Please set the " + + "appropriate value for [ " + XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey() + " ] setting."); + } + return BootstrapCheckResult.success(); + } + + @Override + public boolean alwaysEnforce() { + return fipsModeEnabled; + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140SecureSettingsBootstrapCheck.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140SecureSettingsBootstrapCheck.java new file mode 100644 index 0000000000000..c766dd0ffaa2b --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/FIPS140SecureSettingsBootstrapCheck.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security; + +import org.elasticsearch.bootstrap.BootstrapCheck; +import org.elasticsearch.bootstrap.BootstrapContext; +import org.elasticsearch.common.settings.KeyStoreWrapper; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; + +import java.io.IOException; +import java.io.UncheckedIOException; + +public class FIPS140SecureSettingsBootstrapCheck implements BootstrapCheck { + + private final boolean fipsModeEnabled; + private final Environment environment; + + FIPS140SecureSettingsBootstrapCheck(Settings settings, Environment environment) { + this.fipsModeEnabled = Security.FIPS_MODE_ENABLED.get(settings); + this.environment = environment; + } + + /** + * Test if the node fails the check. + * + * @param context the bootstrap context + * @return the result of the bootstrap check + */ + @Override + public BootstrapCheckResult check(BootstrapContext context) { + if (fipsModeEnabled) { + try (KeyStoreWrapper secureSettings = KeyStoreWrapper.load(environment.configFile())) { + if (secureSettings != null && secureSettings.getFormatVersion() < 3) { + return BootstrapCheckResult.failure("Secure settings store is not of the latest version. 
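The password-hashing check above only inspects the configured algorithm name: anything starting with `pbkdf2` (case-insensitive) passes, everything else, including the default `bcrypt`, fails when FIPS mode is enabled. A hedged sketch of that prefix test; it mirrors the check's logic rather than calling the check class itself:

```java
import java.util.Locale;

public class FipsPasswordHashingSketch {
    // The same prefix test the check applies to the configured algorithm name.
    static boolean allowedInFips(String algorithm) {
        return algorithm.toLowerCase(Locale.ROOT).startsWith("pbkdf2");
    }

    public static void main(String[] args) {
        assert allowedInFips("PBKDF2_10000");    // any PBKDF2 variant passes
        assert allowedInFips("pbkdf2");          // the bare name passes too
        assert allowedInFips("bcrypt") == false; // the default bcrypt fails
    }
}
```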
Please use " + + "bin/elasticsearch-keystore create to generate a new secure settings store and migrate the secure settings there."); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + return BootstrapCheckResult.success(); + } + + @Override + public boolean alwaysEnforce() { + return fipsModeEnabled; + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 98b5df3edc528..f4bb4b7eb3b2e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -77,12 +77,16 @@ import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.SecurityExtension; import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.security.SecuritySettings; +import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesAction; +import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesAction; +import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction; import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheAction; import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheAction; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleAction; @@ -117,8 +121,12 @@ import org.elasticsearch.xpack.core.security.authz.accesscontrol.SecurityIndexSearcherWrapper; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; +import org.elasticsearch.xpack.security.action.privilege.TransportDeletePrivilegesAction; +import org.elasticsearch.xpack.security.action.privilege.TransportGetPrivilegesAction; +import org.elasticsearch.xpack.security.action.privilege.TransportPutPrivilegesAction; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.index.IndexAuditTrailField; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings; @@ -168,11 +176,13 @@ import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; +import org.elasticsearch.xpack.security.authc.kerberos.KerberosRealmBootstrapCheck; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.SecuritySearchOperationListener; import org.elasticsearch.xpack.security.authz.accesscontrol.OptOutQueryCache; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; +import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore; import 
org.elasticsearch.xpack.security.authz.store.FileRolesStore; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; import org.elasticsearch.xpack.security.ingest.SetSecurityUserProcessor; @@ -180,6 +190,10 @@ import org.elasticsearch.xpack.security.rest.action.RestAuthenticateAction; import org.elasticsearch.xpack.security.rest.action.oauth2.RestGetTokenAction; import org.elasticsearch.xpack.security.rest.action.oauth2.RestInvalidateTokenAction; +import org.elasticsearch.xpack.security.rest.action.privilege.RestDeletePrivilegesAction; +import org.elasticsearch.xpack.security.rest.action.privilege.RestGetPrivilegesAction; +import org.elasticsearch.xpack.security.rest.action.privilege.RestPutPrivilegeAction; +import org.elasticsearch.xpack.security.rest.action.privilege.RestPutPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.realm.RestClearRealmCacheAction; import org.elasticsearch.xpack.security.rest.action.role.RestClearRolesCacheAction; import org.elasticsearch.xpack.security.rest.action.role.RestDeleteRoleAction; @@ -204,7 +218,6 @@ import org.elasticsearch.xpack.security.transport.filter.IPFilter; import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4HttpServerTransport; import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4ServerTransport; -import org.elasticsearch.xpack.core.template.TemplateUtils; import org.elasticsearch.xpack.security.transport.nio.SecurityNioHttpServerTransport; import org.elasticsearch.xpack.security.transport.nio.SecurityNioTransport; import org.joda.time.DateTime; @@ -243,6 +256,8 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw DiscoveryPlugin, MapperPlugin, ExtensiblePlugin { private static final Logger logger = Loggers.getLogger(Security.class); + static final Setting FIPS_MODE_ENABLED = + Setting.boolSetting("xpack.security.fips_mode.enabled", false, Property.NodeScope); static final Setting> AUDIT_OUTPUTS_SETTING = Setting.listSetting(SecurityField.setting("audit.outputs"), @@ -281,9 +296,6 @@ public Security(Settings settings, final Path configPath) { this.enabled = XPackSettings.SECURITY_ENABLED.get(settings); if (enabled && transportClientMode == false) { validateAutoCreateIndex(settings); - } - - if (enabled) { // we load them all here otherwise we can't access secure settings since they are closed once the checks are // fetched final List checks = new ArrayList<>(); @@ -291,9 +303,14 @@ public Security(Settings settings, final Path configPath) { new TokenSSLBootstrapCheck(), new PkiRealmBootstrapCheck(getSslService()), new TLSLicenseBootstrapCheck(), - new PasswordHashingAlgorithmBootstrapCheck())); + new PasswordHashingAlgorithmBootstrapCheck(), + new FIPS140SecureSettingsBootstrapCheck(settings, env), + new FIPS140JKSKeystoreBootstrapCheck(settings), + new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings), + new KerberosRealmBootstrapCheck(env))); checks.addAll(InternalRealms.getBootstrapChecks(settings, env)); this.bootstrapChecks = Collections.unmodifiableList(checks); + Automatons.updateMaxDeterminizedStates(settings); } else { this.bootstrapChecks = Collections.emptyList(); } @@ -432,28 +449,15 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste securityIndex.get().addIndexStateListener(nativeRoleMappingStore::onSecurityIndexStateChange); - AuthenticationFailureHandler failureHandler = null; - String extensionName = null; - for (SecurityExtension extension : securityExtensions) { - AuthenticationFailureHandler 
extensionFailureHandler = extension.getAuthenticationFailureHandler(); - if (extensionFailureHandler != null && failureHandler != null) { - throw new IllegalStateException("Extensions [" + extensionName + "] and [" + extension.toString() + "] " + - "both set an authentication failure handler"); - } - failureHandler = extensionFailureHandler; - extensionName = extension.toString(); - } - if (failureHandler == null) { - logger.debug("Using default authentication failure handler"); - failureHandler = new DefaultAuthenticationFailureHandler(); - } else { - logger.debug("Using authentication failure handler from extension [" + extensionName + "]"); - } + final AuthenticationFailureHandler failureHandler = createAuthenticationFailureHandler(realms); authcService.set(new AuthenticationService(settings, realms, auditTrailService, failureHandler, threadPool, anonymousUser, tokenService)); components.add(authcService.get()); + final NativePrivilegeStore privilegeStore = new NativePrivilegeStore(settings, client, securityIndex.get()); + components.add(privilegeStore); + final FileRolesStore fileRolesStore = new FileRolesStore(settings, env, resourceWatcherService, getLicenseState()); final NativeRolesStore nativeRolesStore = new NativeRolesStore(settings, client, getLicenseState(), securityIndex.get()); final ReservedRolesStore reservedRolesStore = new ReservedRolesStore(); @@ -462,7 +466,7 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste rolesProviders.addAll(extension.getRolesProviders(settings, resourceWatcherService)); } final CompositeRolesStore allRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, - reservedRolesStore, rolesProviders, threadPool.getThreadContext(), getLicenseState()); + reservedRolesStore, privilegeStore, rolesProviders, threadPool.getThreadContext(), getLicenseState()); securityIndex.get().addIndexStateListener(allRolesStore::onSecurityIndexStateChange); // to keep things simple, just invalidate all cached entries on license change. 
this happens so rarely that the impact should be // minimal @@ -498,6 +502,45 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste return components; } + private AuthenticationFailureHandler createAuthenticationFailureHandler(final Realms realms) { + AuthenticationFailureHandler failureHandler = null; + String extensionName = null; + for (SecurityExtension extension : securityExtensions) { + AuthenticationFailureHandler extensionFailureHandler = extension.getAuthenticationFailureHandler(); + if (extensionFailureHandler != null && failureHandler != null) { + throw new IllegalStateException("Extensions [" + extensionName + "] and [" + extension.toString() + "] " + + "both set an authentication failure handler"); + } + failureHandler = extensionFailureHandler; + extensionName = extension.toString(); + } + if (failureHandler == null) { + logger.debug("Using default authentication failure handler"); + final Map> defaultFailureResponseHeaders = new HashMap<>(); + realms.asList().stream().forEach((realm) -> { + Map> realmFailureHeaders = realm.getAuthenticationFailureHeaders(); + realmFailureHeaders.entrySet().stream().forEach((e) -> { + String key = e.getKey(); + e.getValue().stream() + .filter(v -> defaultFailureResponseHeaders.computeIfAbsent(key, x -> new ArrayList<>()).contains(v) == false) + .forEach(v -> defaultFailureResponseHeaders.get(key).add(v)); + }); + }); + + if (TokenService.isTokenServiceEnabled(settings)) { + String bearerScheme = "Bearer realm=\"" + XPackField.SECURITY + "\""; + if (defaultFailureResponseHeaders.computeIfAbsent("WWW-Authenticate", x -> new ArrayList<>()) + .contains(bearerScheme) == false) { + defaultFailureResponseHeaders.get("WWW-Authenticate").add(bearerScheme); + } + } + failureHandler = new DefaultAuthenticationFailureHandler(defaultFailureResponseHeaders); + } else { + logger.debug("Using authentication failure handler from extension [" + extensionName + "]"); + } + return failureHandler; + } + @Override public Settings additionalSettings() { return additionalSettings(settings, enabled, transportClientMode); @@ -552,6 +595,7 @@ public static List> getSettings(boolean transportClientMode, List> getSettings(boolean transportClientMode, List(SamlPrepareAuthenticationAction.INSTANCE, TransportSamlPrepareAuthenticationAction.class), new ActionHandler<>(SamlAuthenticateAction.INSTANCE, TransportSamlAuthenticateAction.class), new ActionHandler<>(SamlLogoutAction.INSTANCE, TransportSamlLogoutAction.class), - new ActionHandler<>(SamlInvalidateSessionAction.INSTANCE, TransportSamlInvalidateSessionAction.class) + new ActionHandler<>(SamlInvalidateSessionAction.INSTANCE, TransportSamlInvalidateSessionAction.class), + new ActionHandler<>(GetPrivilegesAction.INSTANCE, TransportGetPrivilegesAction.class), + new ActionHandler<>(PutPrivilegesAction.INSTANCE, TransportPutPrivilegesAction.class), + new ActionHandler<>(DeletePrivilegesAction.INSTANCE, TransportDeletePrivilegesAction.class) ); } @@ -716,7 +764,11 @@ public List getRestHandlers(Settings settings, RestController restC new RestSamlPrepareAuthenticationAction(settings, restController, getLicenseState()), new RestSamlAuthenticateAction(settings, restController, getLicenseState()), new RestSamlLogoutAction(settings, restController, getLicenseState()), - new RestSamlInvalidateSessionAction(settings, restController, getLicenseState()) + new RestSamlInvalidateSessionAction(settings, restController, getLicenseState()), + new RestGetPrivilegesAction(settings, restController, getLicenseState()), + 
new RestPutPrivilegesAction(settings, restController, getLicenseState()), + new RestPutPrivilegeAction(settings, restController, getLicenseState()), + new RestDeletePrivilegesAction(settings, restController, getLicenseState()) ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/privilege/TransportDeletePrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/privilege/TransportDeletePrivilegesAction.java new file mode 100644 index 0000000000000..da23ac50b6912 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/privilege/TransportDeletePrivilegesAction.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.action.privilege; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesAction; +import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesResponse; +import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore; + +import java.util.Collections; +import java.util.Set; + +/** + * Transport action to retrieve one or more application privileges from the security index + */ +public class TransportDeletePrivilegesAction extends HandledTransportAction { + + private final NativePrivilegeStore privilegeStore; + + @Inject + public TransportDeletePrivilegesAction(Settings settings, ActionFilters actionFilters, + NativePrivilegeStore privilegeStore, + TransportService transportService) { + super(settings, DeletePrivilegesAction.NAME, transportService, actionFilters, DeletePrivilegesRequest::new); + this.privilegeStore = privilegeStore; + } + + @Override + protected void doExecute(Task task, final DeletePrivilegesRequest request, final ActionListener listener) { + if (request.privileges() == null || request.privileges().length == 0) { + listener.onResponse(new DeletePrivilegesResponse(Collections.emptyList())); + return; + } + final Set names = Sets.newHashSet(request.privileges()); + this.privilegeStore.deletePrivileges(request.application(), names, request.getRefreshPolicy(), ActionListener.wrap( + privileges -> listener.onResponse( + new DeletePrivilegesResponse(privileges.getOrDefault(request.application(), Collections.emptyList())) + ), listener::onFailure + )); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/privilege/TransportGetPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/privilege/TransportGetPrivilegesAction.java new file mode 100644 index 0000000000000..2a2f38fe7bbbd --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/privilege/TransportGetPrivilegesAction.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.action.privilege; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesAction; +import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesResponse; +import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import static org.elasticsearch.common.Strings.isNullOrEmpty; + +/** + * Transport action to retrieve one or more application privileges from the security index + */ +public class TransportGetPrivilegesAction extends HandledTransportAction { + + private final NativePrivilegeStore privilegeStore; + + @Inject + public TransportGetPrivilegesAction(Settings settings, ActionFilters actionFilters, + NativePrivilegeStore privilegeStore, TransportService transportService) { + super(settings, GetPrivilegesAction.NAME, transportService, actionFilters, GetPrivilegesRequest::new); + this.privilegeStore = privilegeStore; + } + + @Override + protected void doExecute(Task task, final GetPrivilegesRequest request, final ActionListener listener) { + final Set names; + if (request.privileges() == null || request.privileges().length == 0) { + names = null; + } else { + names = new HashSet<>(Arrays.asList(request.privileges())); + } + final Collection applications = isNullOrEmpty(request.application()) ? null : Collections.singleton(request.application()); + this.privilegeStore.getPrivileges(applications, names, ActionListener.wrap( + privileges -> listener.onResponse(new GetPrivilegesResponse(privileges)), + listener::onFailure + )); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/privilege/TransportPutPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/privilege/TransportPutPrivilegesAction.java new file mode 100644 index 0000000000000..affb981c44f2c --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/privilege/TransportPutPrivilegesAction.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
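Both privilege transport actions above treat empty input as a special case: a get with no privilege names (and/or no application) resolves to `null` and fetches everything, while a delete with no names returns an empty response without touching the security index. A hedged request-building sketch; the application and privilege names are examples, and the setter methods named after the request fields are an assumption:

```java
import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesRequest;
import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesRequest;

public class PrivilegeRequestSketch {
    public static void main(String[] args) {
        // Fetch two named privileges of a hypothetical "myapp" application.
        GetPrivilegesRequest get = new GetPrivilegesRequest();
        get.application("myapp");
        get.privileges("read", "write");

        // An empty privileges array is answered with an empty response by
        // TransportDeletePrivilegesAction before the store is ever consulted.
        DeletePrivilegesRequest delete = new DeletePrivilegesRequest();
        delete.application("myapp");
        delete.privileges(new String[0]);
    }
}
```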
+ */ +package org.elasticsearch.xpack.security.action.privilege; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction; +import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesResponse; +import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore; + +import java.util.Collections; + +/** + * Transport action to retrieve one or more application privileges from the security index + */ +public class TransportPutPrivilegesAction extends HandledTransportAction { + + private final NativePrivilegeStore privilegeStore; + + @Inject + public TransportPutPrivilegesAction(Settings settings, ActionFilters actionFilters, + NativePrivilegeStore privilegeStore, TransportService transportService) { + super(settings, PutPrivilegesAction.NAME, transportService, actionFilters, PutPrivilegesRequest::new); + this.privilegeStore = privilegeStore; + } + + @Override + protected void doExecute(Task task, final PutPrivilegesRequest request, final ActionListener listener) { + if (request.getPrivileges() == null || request.getPrivileges().size() == 0) { + listener.onResponse(new PutPrivilegesResponse(Collections.emptyMap())); + } else { + this.privilegeStore.putPrivileges(request.getPrivileges(), request.getRefreshPolicy(), ActionListener.wrap( + created -> listener.onResponse(new PutPrivilegesResponse(created)), + listener::onFailure + )); + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java index 9571b022e0a67..b49984b28da08 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java @@ -24,19 +24,26 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.permission.IndicesPermission; import org.elasticsearch.xpack.core.security.authz.permission.Role; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authz.AuthorizationService; +import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import 
java.util.stream.Collectors; /** * Transport action that tests whether a user has the specified @@ -46,13 +53,16 @@ public class TransportHasPrivilegesAction extends HandledTransportAction checkPrivileges(request, role, listener), - listener::onFailure)); + role -> resolveApplicationPrivileges(request, ActionListener.wrap( + applicationPrivilegeLookup -> checkPrivileges(request, role, applicationPrivilegeLookup, listener), + listener::onFailure)), + listener::onFailure)); + } + + private void resolveApplicationPrivileges(HasPrivilegesRequest request, + ActionListener> listener) { + final Set applications = getApplicationNames(request); + privilegeStore.getPrivileges(applications, null, listener); + } + + private Set getApplicationNames(HasPrivilegesRequest request) { + return Arrays.stream(request.applicationPrivileges()) + .map(RoleDescriptor.ApplicationResourcePrivileges::getApplication) + .collect(Collectors.toSet()); } private void checkPrivileges(HasPrivilegesRequest request, Role userRole, + Collection applicationPrivileges, ActionListener listener) { - logger.debug(() -> new ParameterizedMessage("Check whether role [{}] has privileges cluster=[{}] index=[{}]", - Strings.arrayToCommaDelimitedString(userRole.names()), Strings.arrayToCommaDelimitedString(request.clusterPrivileges()), - Strings.arrayToCommaDelimitedString(request.indexPrivileges()))); + logger.trace(() -> new ParameterizedMessage("Check whether role [{}] has privileges cluster=[{}] index=[{}] application=[{}]", + Strings.arrayToCommaDelimitedString(userRole.names()), + Strings.arrayToCommaDelimitedString(request.clusterPrivileges()), + Strings.arrayToCommaDelimitedString(request.indexPrivileges()), + Strings.arrayToCommaDelimitedString(request.applicationPrivileges()) + )); Map cluster = new HashMap<>(); for (String checkAction : request.clusterPrivileges()) { @@ -86,30 +114,62 @@ private void checkPrivileges(HasPrivilegesRequest request, Role userRole, final Map predicateCache = new HashMap<>(); - final Map indices = new LinkedHashMap<>(); + final Map indices = new LinkedHashMap<>(); for (RoleDescriptor.IndicesPrivileges check : request.indexPrivileges()) { for (String index : check.getIndices()) { final Map privileges = new HashMap<>(); - final HasPrivilegesResponse.IndexPrivileges existing = indices.get(index); + final HasPrivilegesResponse.ResourcePrivileges existing = indices.get(index); if (existing != null) { privileges.putAll(existing.getPrivileges()); } for (String privilege : check.getPrivileges()) { if (testIndexMatch(index, privilege, userRole, predicateCache)) { - logger.debug(() -> new ParameterizedMessage("Role [{}] has [{}] on [{}]", - Strings.arrayToCommaDelimitedString(userRole.names()), privilege, index)); + logger.debug(() -> new ParameterizedMessage("Role [{}] has [{}] on index [{}]", + Strings.arrayToCommaDelimitedString(userRole.names()), privilege, index)); privileges.put(privilege, true); } else { - logger.debug(() -> new ParameterizedMessage("Role [{}] does not have [{}] on [{}]", - Strings.arrayToCommaDelimitedString(userRole.names()), privilege, index)); + logger.debug(() -> new ParameterizedMessage("Role [{}] does not have [{}] on index [{}]", + Strings.arrayToCommaDelimitedString(userRole.names()), privilege, index)); privileges.put(privilege, false); allMatch = false; } } - indices.put(index, new HasPrivilegesResponse.IndexPrivileges(index, privileges)); + indices.put(index, new HasPrivilegesResponse.ResourcePrivileges(index, privileges)); + } + } + + final Map> privilegesByApplication = 
new HashMap<>(); + for (String applicationName : getApplicationNames(request)) { + logger.debug("Checking privileges for application {}", applicationName); + final Map appPrivilegesByResource = new LinkedHashMap<>(); + for (RoleDescriptor.ApplicationResourcePrivileges p : request.applicationPrivileges()) { + if (applicationName.equals(p.getApplication())) { + for (String resource : p.getResources()) { + final Map privileges = new HashMap<>(); + final HasPrivilegesResponse.ResourcePrivileges existing = appPrivilegesByResource.get(resource); + if (existing != null) { + privileges.putAll(existing.getPrivileges()); + } + for (String privilege : p.getPrivileges()) { + if (testResourceMatch(applicationName, resource, privilege, userRole, applicationPrivileges)) { + logger.debug(() -> new ParameterizedMessage("Role [{}] has [{} {}] on resource [{}]", + Strings.arrayToCommaDelimitedString(userRole.names()), applicationName, privilege, resource)); + privileges.put(privilege, true); + } else { + logger.debug(() -> new ParameterizedMessage("Role [{}] does not have [{} {}] on resource [{}]", + Strings.arrayToCommaDelimitedString(userRole.names()), applicationName, privilege, resource)); + privileges.put(privilege, false); + allMatch = false; + } + } + appPrivilegesByResource.put(resource, new HasPrivilegesResponse.ResourcePrivileges(resource, privileges)); + } + } } + privilegesByApplication.put(applicationName, appPrivilegesByResource.values()); } - listener.onResponse(new HasPrivilegesResponse(allMatch, cluster, indices.values())); + + listener.onResponse(new HasPrivilegesResponse(allMatch, cluster, indices.values(), privilegesByApplication)); } private boolean testIndexMatch(String checkIndex, String checkPrivilegeName, Role userRole, @@ -139,4 +199,17 @@ private static boolean testIndex(Automaton checkIndex, Automaton roleIndex) { private static boolean testPrivilege(Privilege checkPrivilege, Automaton roleAutomaton) { return Operations.subsetOf(checkPrivilege.getAutomaton(), roleAutomaton); } + + private boolean testResourceMatch(String application, String checkResource, String checkPrivilegeName, Role userRole, + Collection privileges) { + final Set nameSet = Collections.singleton(checkPrivilegeName); + final ApplicationPrivilege checkPrivilege = ApplicationPrivilege.get(application, nameSet, privileges); + assert checkPrivilege.getApplication().equals(application) + : "Privilege " + checkPrivilege + " should have application " + application; + assert checkPrivilege.name().equals(nameSet) + : "Privilege " + checkPrivilege + " should have name " + nameSet; + + return userRole.application().grants(checkPrivilege, checkResource); + } + } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java index 8bae951e88360..85084da84648d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java @@ -271,7 +271,9 @@ private void consumeToken(AuthenticationToken token) { if (result.getStatus() == AuthenticationResult.Status.TERMINATE) { logger.info("Authentication of [{}] was terminated by realm [{}] - {}", authenticationToken.principal(), realm.name(), result.getMessage()); - userListener.onFailure(Exceptions.authenticationError(result.getMessage(), result.getException())); + Exception 
e = (result.getException() != null) ? result.getException() + : Exceptions.authenticationError(result.getMessage()); + userListener.onFailure(e); } else { if (result.getMessage() != null) { messages.put(realm, new Tuple<>(result.getMessage(), result.getException())); @@ -541,7 +543,6 @@ static class AuditableRestRequest extends AuditableRequest { private final RestRequest request; - @SuppressWarnings("unchecked") AuditableRestRequest(AuditTrail auditTrail, AuthenticationFailureHandler failureHandler, ThreadContext threadContext, RestRequest request) { super(auditTrail, failureHandler, threadContext); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java index d8d0d26f99e0d..d568a052a5e15 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; +import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; import org.elasticsearch.xpack.core.security.authc.ldap.LdapRealmSettings; import org.elasticsearch.xpack.core.security.authc.pki.PkiRealmSettings; import org.elasticsearch.xpack.core.security.authc.saml.SamlRealmSettings; @@ -24,6 +25,7 @@ import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.authc.file.FileRealm; +import org.elasticsearch.xpack.security.authc.kerberos.KerberosRealm; import org.elasticsearch.xpack.security.authc.ldap.LdapRealm; import org.elasticsearch.xpack.security.authc.pki.PkiRealm; import org.elasticsearch.xpack.security.authc.saml.SamlRealm; @@ -32,10 +34,8 @@ import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -50,17 +50,16 @@ public final class InternalRealms { /** * The list of all internal realm types, excluding {@link ReservedRealm#TYPE}. 
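The `AuthenticationService` change above matters for realms like Kerberos: when a realm terminates authentication and supplies its own exception, that exception (with any `WWW-Authenticate` headers it carries) now reaches the caller unwrapped, and the generic authentication error is only a fallback. A small behavioral sketch with an illustrative message:

```java
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.core.security.authc.AuthenticationResult;
import org.elasticsearch.xpack.core.security.support.Exceptions;

public class TerminateHandlingSketch {
    public static void main(String[] args) {
        ElasticsearchSecurityException realmException =
                new ElasticsearchSecurityException("negotiation incomplete", RestStatus.UNAUTHORIZED);
        realmException.addHeader("WWW-Authenticate", "Negotiate");

        AuthenticationResult result = AuthenticationResult.terminate("negotiation incomplete", realmException);

        // The realm's own exception wins; only a null exception falls back
        // to the generic authentication error.
        Exception e = (result.getException() != null)
                ? result.getException()
                : Exceptions.authenticationError(result.getMessage());
        assert e == realmException; // headers intact, no generic wrapper
    }
}
```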
*/ - private static final Set XPACK_TYPES = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( - NativeRealmSettings.TYPE, FileRealmSettings.TYPE, LdapRealmSettings.AD_TYPE, LdapRealmSettings.LDAP_TYPE, PkiRealmSettings.TYPE, - SamlRealmSettings.TYPE - ))); + private static final Set XPACK_TYPES = Collections + .unmodifiableSet(Sets.newHashSet(NativeRealmSettings.TYPE, FileRealmSettings.TYPE, LdapRealmSettings.AD_TYPE, + LdapRealmSettings.LDAP_TYPE, PkiRealmSettings.TYPE, SamlRealmSettings.TYPE, KerberosRealmSettings.TYPE)); /** * The list of all standard realm types, which are those provided by x-pack and do not have extensive * interaction with third party sources */ - private static final Set STANDARD_TYPES = - Collections.unmodifiableSet(Sets.difference(XPACK_TYPES, Collections.singleton(SamlRealmSettings.TYPE))); + private static final Set STANDARD_TYPES = Collections.unmodifiableSet(Sets.newHashSet(NativeRealmSettings.TYPE, + FileRealmSettings.TYPE, LdapRealmSettings.AD_TYPE, LdapRealmSettings.LDAP_TYPE, PkiRealmSettings.TYPE)); /** * Determines whether type is an internal realm-type that is provided by x-pack, @@ -105,6 +104,7 @@ public static Map getFactories(ThreadPool threadPool, Res sslService, resourceWatcherService, nativeRoleMappingStore, threadPool)); map.put(PkiRealmSettings.TYPE, config -> new PkiRealm(config, resourceWatcherService, nativeRoleMappingStore)); map.put(SamlRealmSettings.TYPE, config -> SamlRealm.create(config, sslService, resourceWatcherService, nativeRoleMappingStore)); + map.put(KerberosRealmSettings.TYPE, config -> new KerberosRealm(config, nativeRoleMappingStore, threadPool)); return Collections.unmodifiableMap(map); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationToken.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationToken.java new file mode 100644 index 0000000000000..1a330bd2ddd54 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationToken.java @@ -0,0 +1,150 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.security.authc.kerberos; + +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; + +import java.util.Arrays; +import java.util.Base64; + +/** + * This class represents an AuthenticationToken for Kerberos authentication + * using SPNEGO. The token stores base 64 decoded token bytes, extracted from + * the Authorization header with auth scheme 'Negotiate'. + *
<p>
    + * Example Authorization header "Authorization: Negotiate + * YIIChgYGKwYBBQUCoII..." + *
<p>
    + * If there is any error handling during extraction of 'Negotiate' header then + * it throws {@link ElasticsearchSecurityException} with + * {@link RestStatus#UNAUTHORIZED} and header 'WWW-Authenticate: Negotiate' + */ +public final class KerberosAuthenticationToken implements AuthenticationToken { + + public static final String WWW_AUTHENTICATE = "WWW-Authenticate"; + public static final String AUTH_HEADER = "Authorization"; + public static final String NEGOTIATE_SCHEME_NAME = "Negotiate"; + public static final String NEGOTIATE_AUTH_HEADER_PREFIX = NEGOTIATE_SCHEME_NAME + " "; + + // authorization scheme check is case-insensitive + private static final boolean IGNORE_CASE_AUTH_HEADER_MATCH = true; + + private final byte[] decodedToken; + + public KerberosAuthenticationToken(final byte[] decodedToken) { + this.decodedToken = decodedToken; + } + + /** + * Extract token from authorization header and if it is valid + * {@value #NEGOTIATE_AUTH_HEADER_PREFIX} then returns + * {@link KerberosAuthenticationToken} + * + * @param authorizationHeader Authorization header from request + * @return returns {@code null} if {@link #AUTH_HEADER} is empty or does not + * start with {@value #NEGOTIATE_AUTH_HEADER_PREFIX} else returns valid + * {@link KerberosAuthenticationToken} + * @throws ElasticsearchSecurityException when negotiate header is invalid. + */ + public static KerberosAuthenticationToken extractToken(final String authorizationHeader) { + if (Strings.isNullOrEmpty(authorizationHeader)) { + return null; + } + if (authorizationHeader.regionMatches(IGNORE_CASE_AUTH_HEADER_MATCH, 0, NEGOTIATE_AUTH_HEADER_PREFIX, 0, + NEGOTIATE_AUTH_HEADER_PREFIX.length()) == false) { + return null; + } + + final String base64EncodedToken = authorizationHeader.substring(NEGOTIATE_AUTH_HEADER_PREFIX.length()).trim(); + if (Strings.isEmpty(base64EncodedToken)) { + throw unauthorized("invalid negotiate authentication header value, expected base64 encoded token but value is empty", null); + } + + byte[] decodedKerberosTicket = null; + try { + decodedKerberosTicket = Base64.getDecoder().decode(base64EncodedToken); + } catch (IllegalArgumentException iae) { + throw unauthorized("invalid negotiate authentication header value, could not decode base64 token {}", iae, base64EncodedToken); + } + + return new KerberosAuthenticationToken(decodedKerberosTicket); + } + + @Override + public String principal() { + return ""; + } + + @Override + public Object credentials() { + return decodedToken; + } + + @Override + public void clearCredentials() { + Arrays.fill(decodedToken, (byte) 0); + } + + @Override + public int hashCode() { + return Arrays.hashCode(decodedToken); + } + + @Override + public boolean equals(final Object other) { + if (this == other) + return true; + if (other == null) + return false; + if (getClass() != other.getClass()) + return false; + final KerberosAuthenticationToken otherKerbToken = (KerberosAuthenticationToken) other; + return Arrays.equals(otherKerbToken.decodedToken, this.decodedToken); + } + + /** + * Creates {@link ElasticsearchSecurityException} with + * {@link RestStatus#UNAUTHORIZED} and cause. This also populates + * 'WWW-Authenticate' header with value as 'Negotiate' scheme. + * + * @param message the detail message + * @param cause nested exception + * @param args the arguments for the message + * @return instance of {@link ElasticsearchSecurityException} + */ + static ElasticsearchSecurityException unauthorized(final String message, final Throwable cause, final Object... 
args) { + ElasticsearchSecurityException ese = new ElasticsearchSecurityException(message, RestStatus.UNAUTHORIZED, cause, args); + ese.addHeader(WWW_AUTHENTICATE, NEGOTIATE_SCHEME_NAME); + return ese; + } + + /** + * Sets 'WWW-Authenticate' header if outToken is not null on passed instance of + * {@link ElasticsearchSecurityException} and returns the instance.
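To summarize the extraction contract shown above: a missing header or a different scheme yields `null` so other realms can try, the scheme comparison ignores case, and a `Negotiate` header with an empty or non-base64 value throws a 401 `ElasticsearchSecurityException` carrying `WWW-Authenticate: Negotiate`. A hedged usage sketch with illustrative header values:

```java
import java.util.Base64;

import org.elasticsearch.xpack.security.authc.kerberos.KerberosAuthenticationToken;

public class ExtractTokenSketch {
    public static void main(String[] args) {
        // No header, or a different scheme: null, so other realms may attempt auth.
        assert KerberosAuthenticationToken.extractToken(null) == null;
        assert KerberosAuthenticationToken.extractToken("Basic dXNlcjpwYXNz") == null;

        // Scheme matching is case-insensitive; well-formed base64 yields a token.
        byte[] ticket = {0x60, 0x06, 0x06, 0x05}; // illustrative bytes, not a real SPNEGO ticket
        String header = "negotiate " + Base64.getEncoder().encodeToString(ticket);
        KerberosAuthenticationToken token = KerberosAuthenticationToken.extractToken(header);
        assert token != null;

        // "Negotiate " followed by nothing (or invalid base64) throws an
        // ElasticsearchSecurityException with status 401 and WWW-Authenticate: Negotiate.
    }
}
```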
    + * If outToken is provided and is not {@code null} or empty, then that is + * appended to 'Negotiate ' and is used as header value for header + * 'WWW-Authenticate' sent to the peer in the form 'Negotiate oYH1MIHyoAMK...'. + * This is required by client for GSS negotiation to continue further. + * + * @param ese instance of {@link ElasticsearchSecurityException} with status + * {@link RestStatus#UNAUTHORIZED} + * @param outToken if non {@code null} and not empty then this will be the value + * sent to the peer. + * @return instance of {@link ElasticsearchSecurityException} with + * 'WWW-Authenticate' header populated. + */ + static ElasticsearchSecurityException unauthorizedWithOutputToken(final ElasticsearchSecurityException ese, final String outToken) { + assert ese.status() == RestStatus.UNAUTHORIZED; + if (Strings.hasText(outToken)) { + ese.addHeader(WWW_AUTHENTICATE, NEGOTIATE_AUTH_HEADER_PREFIX + outToken); + } + return ese; + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java new file mode 100644 index 0000000000000..b4a8b6aabf076 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java @@ -0,0 +1,214 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.security.authc.kerberos; + +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.cache.Cache; +import org.elasticsearch.common.cache.CacheBuilder; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; +import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; +import org.elasticsearch.xpack.core.security.authc.Realm; +import org.elasticsearch.xpack.core.security.authc.RealmConfig; +import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; +import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.authc.support.CachingRealm; +import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import org.ietf.jgss.GSSException; + +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import javax.security.auth.login.LoginException; + +import static org.elasticsearch.xpack.security.authc.kerberos.KerberosAuthenticationToken.AUTH_HEADER; +import static org.elasticsearch.xpack.security.authc.kerberos.KerberosAuthenticationToken.NEGOTIATE_AUTH_HEADER_PREFIX; +import static org.elasticsearch.xpack.security.authc.kerberos.KerberosAuthenticationToken.NEGOTIATE_SCHEME_NAME; +import static org.elasticsearch.xpack.security.authc.kerberos.KerberosAuthenticationToken.WWW_AUTHENTICATE; +import static org.elasticsearch.xpack.security.authc.kerberos.KerberosAuthenticationToken.unauthorized; +import static 
org.elasticsearch.xpack.security.authc.kerberos.KerberosAuthenticationToken.unauthorizedWithOutputToken; + +/** + * This class provides support for Kerberos authentication using the SPNEGO + * mechanism. + *
<p>
+ * It supports extracting a Kerberos ticket via + * {@link KerberosAuthenticationToken#extractToken(String)} to build a + * {@link KerberosAuthenticationToken}, and then authenticates the user once + * {@link KerberosTicketValidator} validates the ticket. + *
<p>
    + * On successful authentication, it will build {@link User} object populated + * with roles and will return {@link AuthenticationResult} with user object. On + * authentication failure, it will return {@link AuthenticationResult} with + * status to terminate authentication process. + */ +public final class KerberosRealm extends Realm implements CachingRealm { + + private final Cache userPrincipalNameToUserCache; + private final NativeRoleMappingStore userRoleMapper; + private final KerberosTicketValidator kerberosTicketValidator; + private final ThreadPool threadPool; + private final Path keytabPath; + private final boolean enableKerberosDebug; + private final boolean removeRealmName; + + public KerberosRealm(final RealmConfig config, final NativeRoleMappingStore nativeRoleMappingStore, final ThreadPool threadPool) { + this(config, nativeRoleMappingStore, new KerberosTicketValidator(), threadPool, null); + } + + // pkg scoped for testing + KerberosRealm(final RealmConfig config, final NativeRoleMappingStore nativeRoleMappingStore, + final KerberosTicketValidator kerberosTicketValidator, final ThreadPool threadPool, + final Cache userPrincipalNameToUserCache) { + super(KerberosRealmSettings.TYPE, config); + this.userRoleMapper = nativeRoleMappingStore; + this.userRoleMapper.refreshRealmOnChange(this); + final TimeValue ttl = KerberosRealmSettings.CACHE_TTL_SETTING.get(config.settings()); + if (ttl.getNanos() > 0) { + this.userPrincipalNameToUserCache = (userPrincipalNameToUserCache == null) + ? CacheBuilder.builder() + .setExpireAfterWrite(KerberosRealmSettings.CACHE_TTL_SETTING.get(config.settings())) + .setMaximumWeight(KerberosRealmSettings.CACHE_MAX_USERS_SETTING.get(config.settings())).build() + : userPrincipalNameToUserCache; + } else { + this.userPrincipalNameToUserCache = null; + } + this.kerberosTicketValidator = kerberosTicketValidator; + this.threadPool = threadPool; + this.keytabPath = config.env().configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(config.settings())); + this.enableKerberosDebug = KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE.get(config.settings()); + this.removeRealmName = KerberosRealmSettings.SETTING_REMOVE_REALM_NAME.get(config.settings()); + } + + @Override + public Map> getAuthenticationFailureHeaders() { + return Collections.singletonMap(WWW_AUTHENTICATE, Collections.singletonList(NEGOTIATE_SCHEME_NAME)); + } + + @Override + public void expire(final String username) { + if (userPrincipalNameToUserCache != null) { + userPrincipalNameToUserCache.invalidate(username); + } + } + + @Override + public void expireAll() { + if (userPrincipalNameToUserCache != null) { + userPrincipalNameToUserCache.invalidateAll(); + } + } + + @Override + public boolean supports(final AuthenticationToken token) { + return token instanceof KerberosAuthenticationToken; + } + + @Override + public AuthenticationToken token(final ThreadContext context) { + return KerberosAuthenticationToken.extractToken(context.getHeader(AUTH_HEADER)); + } + + @Override + public void authenticate(final AuthenticationToken token, final ActionListener listener) { + assert token instanceof KerberosAuthenticationToken; + final KerberosAuthenticationToken kerbAuthnToken = (KerberosAuthenticationToken) token; + kerberosTicketValidator.validateTicket((byte[]) kerbAuthnToken.credentials(), keytabPath, enableKerberosDebug, + ActionListener.wrap(userPrincipalNameOutToken -> { + if (userPrincipalNameOutToken.v1() != null) { + final String username = 
maybeRemoveRealmName(userPrincipalNameOutToken.v1()); + buildUser(username, userPrincipalNameOutToken.v2(), listener); + } else { + /** + * This happens when the security context could not be established yet, usually + * because negotiation is still ongoing and the token must be sent back to the + * peer to continue. We terminate the authentication process here because this is + * SPNEGO negotiation and no other realm can handle it. Only one Kerberos realm + * can exist in the system, so we terminate with RestStatus Unauthorized (401) and + * a 'WWW-Authenticate' header whose value carries the token in the form + * 'Negotiate oYH1MIHyoAMK...' + */ + String errorMessage = "failed to authenticate user, gss context negotiation not complete"; + ElasticsearchSecurityException ese = unauthorized(errorMessage, null); + ese = unauthorizedWithOutputToken(ese, userPrincipalNameOutToken.v2()); + listener.onResponse(AuthenticationResult.terminate(errorMessage, ese)); + } + }, e -> handleException(e, listener))); + } + + /** + * Usually principal names are in the form 'user/instance@REALM'. This method + * removes the '@REALM' part from the principal name if + * {@link KerberosRealmSettings#SETTING_REMOVE_REALM_NAME} is {@code true}; + * otherwise it returns the input string unchanged. + * + * @param principalName user principal name + * @return username after removal of realm + */ + protected String maybeRemoveRealmName(final String principalName) { + if (this.removeRealmName) { + int foundAtIndex = principalName.indexOf('@'); + if (foundAtIndex > 0) { + return principalName.substring(0, foundAtIndex); + } + } + return principalName; + } + + private void handleException(Exception e, final ActionListener listener) { + if (e instanceof LoginException) { + listener.onResponse(AuthenticationResult.terminate("failed to authenticate user, service login failure", + unauthorized(e.getLocalizedMessage(), e))); + } else if (e instanceof GSSException) { + listener.onResponse(AuthenticationResult.terminate("failed to authenticate user, gss context negotiation failure", + unauthorized(e.getLocalizedMessage(), e))); + } else { + listener.onFailure(e); + } + } + + private void buildUser(final String username, final String outToken, final ActionListener listener) { + // if outToken is present then it needs to be communicated to the peer; add it to + // the response headers in the thread context. + if (Strings.hasText(outToken)) { + threadPool.getThreadContext().addResponseHeader(WWW_AUTHENTICATE, NEGOTIATE_AUTH_HEADER_PREFIX + outToken); + } + final User user = (userPrincipalNameToUserCache != null) ? userPrincipalNameToUserCache.get(username) : null; + if (user != null) { + /** + * TODO: bizybot If authorizing realms are configured, resolve the user from + * those realms and then return. + */ + listener.onResponse(AuthenticationResult.success(user)); + } else { + /** + * TODO: bizybot If authorizing realms are configured, resolve the user from + * those realms, cache it and then return.
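The truncation above only treats `'@'` as a realm separator when it is not the first character; a standalone sketch of the same logic, assuming `remove_realm_name` is enabled:

```java
public class StripRealmSketch {
    // Mirrors maybeRemoveRealmName's truncation when remove_realm_name is true.
    static String stripRealm(String principalName) {
        int foundAtIndex = principalName.indexOf('@');
        return foundAtIndex > 0 ? principalName.substring(0, foundAtIndex) : principalName;
    }

    public static void main(String[] args) {
        assert stripRealm("user@EXAMPLE.COM").equals("user");
        assert stripRealm("HTTP/es.example.com@EXAMPLE.COM").equals("HTTP/es.example.com");
        assert stripRealm("@EXAMPLE.COM").equals("@EXAMPLE.COM"); // '@' at index 0 is kept
        assert stripRealm("user").equals("user");                 // no realm suffix
    }
}
```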
+ */ + final UserRoleMapper.UserData userData = new UserRoleMapper.UserData(username, null, Collections.emptySet(), null, this.config); + userRoleMapper.resolveRoles(userData, ActionListener.wrap(roles -> { + final User computedUser = new User(username, roles.toArray(new String[roles.size()]), null, null, null, true); + if (userPrincipalNameToUserCache != null) { + userPrincipalNameToUserCache.put(username, computedUser); + } + listener.onResponse(AuthenticationResult.success(computedUser)); + }, listener::onFailure)); + } + } + + @Override + public void lookupUser(final String username, final ActionListener listener) { + listener.onResponse(null); + } +} \ No newline at end of file diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheck.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheck.java new file mode 100644 index 0000000000000..bab899a866425 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheck.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.security.authc.kerberos; + +import org.elasticsearch.bootstrap.BootstrapCheck; +import org.elasticsearch.bootstrap.BootstrapContext; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.xpack.core.security.authc.RealmSettings; +import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Map; +import java.util.Map.Entry; + +/** + * This class is used to perform bootstrap checks for kerberos realm. + *
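For context when reading the realm above: the `Authorization: Negotiate <base64-token>` value it consumes is produced by a SPNEGO initiator on the client side. Below is a minimal, hypothetical JGSS sketch, not part of this change; it assumes the JVM already holds Kerberos credentials (for example from a ticket cache) and that `servicePrincipal` names the Elasticsearch HTTP service, e.g. `HTTP/es.example.com@REALM`:

```java
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSException;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.ietf.jgss.Oid;

import java.util.Base64;

public class SpnegoClientSketch {

    /**
     * Builds the value for the 'Authorization' header that the Kerberos realm
     * expects. Assumes the JVM is already logged in to Kerberos.
     */
    static String negotiateHeader(String servicePrincipal) throws GSSException {
        final GSSManager manager = GSSManager.getInstance();
        // SPNEGO mechanism OID, the same value as KerberosTicketValidator.SPNEGO_OID
        final Oid spnegoOid = new Oid("1.3.6.1.5.5.2");
        final GSSName serverName = manager.createName(servicePrincipal, GSSName.NT_USER_NAME);
        final GSSContext context = manager.createContext(serverName, spnegoOid, null, GSSContext.DEFAULT_LIFETIME);
        try {
            // the first call with an empty input token produces the initial SPNEGO token
            final byte[] outToken = context.initSecContext(new byte[0], 0, 0);
            return "Negotiate " + Base64.getEncoder().encodeToString(outToken);
        } finally {
            context.dispose();
        }
    }
}
```

In a single-round-trip exchange this header is all the realm needs; if the server replies with its own `WWW-Authenticate: Negotiate <token>` header, the client would feed that token back into `initSecContext` until the context is established.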
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheck.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheck.java
new file mode 100644
index 0000000000000..bab899a866425
--- /dev/null
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheck.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.security.authc.kerberos;
+
+import org.elasticsearch.bootstrap.BootstrapCheck;
+import org.elasticsearch.bootstrap.BootstrapContext;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.xpack.core.security.authc.RealmSettings;
+import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings;
+
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * This class is used to perform bootstrap checks for the Kerberos realm.
+ * <p>
+ * We use service keytabs for validating incoming kerberos tickets, so a keytab
+ * is a required configuration. Because Kerberos is configured through JVM-wide
+ * system properties, we cannot support multiple Kerberos realms. This class
+ * adds checks that fail the node if the service keytab does not exist or if
+ * multiple Kerberos realms have been configured.
+ */
+public class KerberosRealmBootstrapCheck implements BootstrapCheck {
+    private final Environment env;
+
+    public KerberosRealmBootstrapCheck(final Environment env) {
+        this.env = env;
+    }
+
+    @Override
+    public BootstrapCheckResult check(final BootstrapContext context) {
+        final Map<String, Settings> realmsSettings = RealmSettings.getRealmSettings(context.settings);
+        boolean isKerberosRealmConfigured = false;
+        for (final Entry<String, Settings> entry : realmsSettings.entrySet()) {
+            final String name = entry.getKey();
+            final Settings realmSettings = entry.getValue();
+            final String type = realmSettings.get("type");
+            if (Strings.hasText(type) == false) {
+                return BootstrapCheckResult.failure("missing realm type for [" + name + "] realm");
+            }
+            if (KerberosRealmSettings.TYPE.equals(type)) {
+                if (isKerberosRealmConfigured) {
+                    return BootstrapCheckResult.failure(
+                            "multiple [" + type + "] realms are configured. [" + type + "] can only have one such realm configured");
+                }
+                isKerberosRealmConfigured = true;
+
+                final Path keytabPath = env.configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(realmSettings));
+                if (Files.exists(keytabPath) == false) {
+                    return BootstrapCheckResult.failure("configured service key tab file [" + keytabPath + "] does not exist");
+                }
+            }
+        }
+        return BootstrapCheckResult.success();
+    }
+
+    @Override
+    public boolean alwaysEnforce() {
+        return true;
+    }
+}
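To make the check above concrete, here is a hedged sketch of settings that would trip or pass it. The realm names (`kerb1`, `kerb2`) and the keytab file name are made up, and the exact setting key is defined by `KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH`; `keytab.path` is shown only for illustration:

```java
import org.elasticsearch.common.settings.Settings;

public class KerberosBootstrapCheckExamples {

    public static void main(String[] args) {
        // Fails the check above: two realms of type "kerberos" are configured.
        final Settings twoKerberosRealms = Settings.builder()
                .put("xpack.security.authc.realms.kerb1.type", "kerberos")
                .put("xpack.security.authc.realms.kerb2.type", "kerberos")
                .build();

        // Passes, provided the (hypothetical) es.keytab file exists under the
        // node's config directory.
        final Settings oneKerberosRealm = Settings.builder()
                .put("xpack.security.authc.realms.kerb1.type", "kerberos")
                .put("xpack.security.authc.realms.kerb1.keytab.path", "es.keytab")
                .build();

        System.out.println(oneKerberosRealm.get("xpack.security.authc.realms.kerb1.type"));
        System.out.println(twoKerberosRealms.get("xpack.security.authc.realms.kerb2.type"));
    }
}
```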
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTicketValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTicketValidator.java
new file mode 100644
index 0000000000000..a63d90178dca4
--- /dev/null
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTicketValidator.java
@@ -0,0 +1,273 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.security.authc.kerberos;
+
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.logging.ESLoggerFactory;
+import org.ietf.jgss.GSSContext;
+import org.ietf.jgss.GSSCredential;
+import org.ietf.jgss.GSSException;
+import org.ietf.jgss.GSSManager;
+import org.ietf.jgss.Oid;
+
+import java.nio.file.Path;
+import java.security.AccessController;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.Base64;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.security.auth.Subject;
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import javax.security.auth.login.LoginContext;
+import javax.security.auth.login.LoginException;
+
+/**
+ * Utility class that validates a kerberos ticket for peer authentication.
+ * <p>
+ * This class takes care of login with the ES service credentials using the
+ * keytab, of GSSContext establishment, and of validating the incoming token.
+ * <p>
+ * It may respond with a token which needs to be communicated back to the peer.
+ */
+public class KerberosTicketValidator {
+    static final Oid SPNEGO_OID = getSpnegoOid();
+
+    private static Oid getSpnegoOid() {
+        Oid oid = null;
+        try {
+            oid = new Oid("1.3.6.1.5.5.2");
+        } catch (GSSException gsse) {
+            throw ExceptionsHelper.convertToRuntime(gsse);
+        }
+        return oid;
+    }
+
+    private static final Logger LOGGER = ESLoggerFactory.getLogger(KerberosTicketValidator.class);
+
+    private static final String KEY_TAB_CONF_NAME = "KeytabConf";
+    private static final String SUN_KRB5_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule";
+
+/**
+ * Validates a client kerberos ticket received from the peer.
+ * <p>
+ * First performs a service login using the keytab; multiple principals in the
+ * keytab are supported, and the principal is selected based on the request.
+ * <p>
    + * The GSS security context establishment state is handled as follows:
    + * If the context is established it will call {@link ActionListener#onResponse} + * with a {@link Tuple} of username and outToken for peer reply.
+ * If the context is not established then it will call
+ * {@link ActionListener#onResponse} with a Tuple where username is null but
+ * with an outToken that needs to be sent to the peer for further negotiation.
    + * Never calls {@link ActionListener#onResponse} with a {@code null} tuple.
+ * On failure, it will call {@link ActionListener#onFailure(Exception)}.
+ *
+ * @param decodedToken base64 decoded kerberos ticket bytes
+ * @param keytabPath Path to the service keytab file containing credentials for the ES
+ *            service.
+ * @param krbDebug if {@code true} enables jaas krb5 login module debug logs.
+ */
+    public void validateTicket(final byte[] decodedToken, final Path keytabPath, final boolean krbDebug,
+            final ActionListener<Tuple<String, String>> actionListener) {
+        final GSSManager gssManager = GSSManager.getInstance();
+        GSSContext gssContext = null;
+        LoginContext loginContext = null;
+        try {
+            loginContext = serviceLogin(keytabPath.toString(), krbDebug);
+            GSSCredential serviceCreds = createCredentials(gssManager, loginContext.getSubject());
+            gssContext = gssManager.createContext(serviceCreds);
+            final String base64OutToken = encodeToString(acceptSecContext(decodedToken, gssContext, loginContext.getSubject()));
+            LOGGER.trace("validateTicket isGSSContextEstablished = {}, username = {}, outToken = {}", gssContext.isEstablished(),
+                    gssContext.getSrcName().toString(), base64OutToken);
+            actionListener.onResponse(new Tuple<>(gssContext.isEstablished() ? gssContext.getSrcName().toString() : null, base64OutToken));
+        } catch (GSSException e) {
+            actionListener.onFailure(e);
+        } catch (PrivilegedActionException pve) {
+            if (pve.getCause() instanceof LoginException) {
+                actionListener.onFailure((LoginException) pve.getCause());
+            } else if (pve.getCause() instanceof GSSException) {
+                actionListener.onFailure((GSSException) pve.getCause());
+            } else {
+                actionListener.onFailure(pve.getException());
+            }
+        } finally {
+            privilegedLogoutNoThrow(loginContext);
+            privilegedDisposeNoThrow(gssContext);
+        }
+    }
+
+    /**
+     * Encodes the specified byte array using the base64 encoding scheme.
+     *
+     * @param outToken byte array to be encoded
+     * @return String containing base64 encoded characters. Returns {@code null} if
+     *         outToken is null or empty.
+     */
+    private String encodeToString(final byte[] outToken) {
+        if (outToken != null && outToken.length > 0) {
+            return Base64.getEncoder().encodeToString(outToken);
+        }
+        return null;
+    }
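`validateTicket` performs a single accept step, which covers the common one-round-trip Kerberos case. In general GSS-API terms, the acceptor side loops until the context is established; the sketch below shows that generic pattern in isolation (it is not code from this change, and the token transport is abstracted behind a hypothetical `TokenChannel`):

```java
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSException;

public class GssAcceptLoopSketch {

    /** Hypothetical transport for negotiation tokens (e.g. HTTP headers). */
    interface TokenChannel {
        byte[] receive();
        void send(byte[] token);
    }

    /** Drives accept-side context establishment to completion. */
    static String acceptUntilEstablished(GSSContext gssContext, TokenChannel channel) throws GSSException {
        while (gssContext.isEstablished() == false) {
            final byte[] inToken = channel.receive();
            final byte[] outToken = gssContext.acceptSecContext(inToken, 0, inToken.length);
            if (outToken != null) {
                // in HTTP terms this is the 'WWW-Authenticate: Negotiate <token>' reply
                channel.send(outToken);
            }
        }
        // the authenticated peer principal, like gssContext.getSrcName() above
        return gssContext.getSrcName().toString();
    }
}
```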
+    /**
+     * Handles GSS context establishment. The received token is passed to the
+     * acceptor-side GSSContext, and an out token is returned that needs to be
+     * sent to the peer for further GSS context establishment.
+     *
+     * @param base64decodedTicket in token generated by peer
+     * @param gssContext instance of acceptor {@link GSSContext}
+     * @param subject authenticated subject
+     * @return a byte[] containing the token to be sent to the peer. null indicates
+     *         that no token is generated.
+     * @throws PrivilegedActionException
+     * @see GSSContext#acceptSecContext(byte[], int, int)
+     */
+    private static byte[] acceptSecContext(final byte[] base64decodedTicket, final GSSContext gssContext, Subject subject)
+            throws PrivilegedActionException {
+        // process token with gss context
+        return doAsWrapper(subject,
+                (PrivilegedExceptionAction<byte[]>) () -> gssContext.acceptSecContext(base64decodedTicket, 0, base64decodedTicket.length));
+    }
+
+    /**
+     * Acquires SPNEGO mechanism credentials for the service based on the subject.
+     *
+     * @param gssManager {@link GSSManager}
+     * @param subject logged in {@link Subject}
+     * @return {@link GSSCredential} for particular mechanism
+     * @throws PrivilegedActionException
+     */
+    private static GSSCredential createCredentials(final GSSManager gssManager, final Subject subject) throws PrivilegedActionException {
+        return doAsWrapper(subject, (PrivilegedExceptionAction<GSSCredential>) () -> gssManager.createCredential(null,
+                GSSCredential.DEFAULT_LIFETIME, SPNEGO_OID, GSSCredential.ACCEPT_ONLY));
+    }
+
+    /**
+     * Privileged wrapper that invokes the action with Subject.doAs to perform work
+     * as the given subject.
+     *
+     * @param subject {@link Subject} to be used for this work
+     * @param action {@link PrivilegedExceptionAction} action for performing inside
+     *            Subject.doAs
+     * @return the value returned by the PrivilegedExceptionAction's run method
+     * @throws PrivilegedActionException
+     */
+    private static <T> T doAsWrapper(final Subject subject, final PrivilegedExceptionAction<T> action) throws PrivilegedActionException {
+        try {
+            return AccessController.doPrivileged((PrivilegedExceptionAction<T>) () -> Subject.doAs(subject, action));
+        } catch (PrivilegedActionException pae) {
+            if (pae.getCause() instanceof PrivilegedActionException) {
+                throw (PrivilegedActionException) pae.getCause();
+            }
+            throw pae;
+        }
+    }
+
+    /**
+     * Privileged wrapper for disposing of a GSSContext; does not throw exceptions,
+     * but logs them as a debug message.
+     *
+     * @param gssContext GSSContext to be disposed.
+     */
+    private static void privilegedDisposeNoThrow(final GSSContext gssContext) {
+        if (gssContext != null) {
+            try {
+                AccessController.doPrivileged((PrivilegedExceptionAction<Void>) () -> {
+                    gssContext.dispose();
+                    return null;
+                });
+            } catch (PrivilegedActionException e) {
+                LOGGER.debug("Could not dispose GSS Context", e.getCause());
+            }
+        }
+    }
+
+    /**
+     * Privileged wrapper for closing a LoginContext; does not throw exceptions,
+     * but logs them as a debug message.
+     *
+     * @param loginContext LoginContext to be closed
+     */
+    private static void privilegedLogoutNoThrow(final LoginContext loginContext) {
+        if (loginContext != null) {
+            try {
+                AccessController.doPrivileged((PrivilegedExceptionAction<Void>) () -> {
+                    loginContext.logout();
+                    return null;
+                });
+            } catch (PrivilegedActionException e) {
+                LOGGER.debug("Could not close LoginContext", e.getCause());
+            }
+        }
+    }
+
+    /**
+     * Performs authentication using the provided keytab.
+     *
+     * @param keytabFilePath Keytab file path
+     * @param krbDebug if {@code true} enables jaas krb5 login module debug logs.
+     * @return authenticated {@link LoginContext} instance. Note: this needs to be
+     *         closed using {@link LoginContext#logout()} after usage.
+     * @throws PrivilegedActionException when the privileged action threw an exception
+     */
+    private static LoginContext serviceLogin(final String keytabFilePath, final boolean krbDebug) throws PrivilegedActionException {
+        return AccessController.doPrivileged((PrivilegedExceptionAction<LoginContext>) () -> {
+            final Subject subject = new Subject(false, Collections.emptySet(), Collections.emptySet(), Collections.emptySet());
+            final Configuration conf = new KeytabJaasConf(keytabFilePath, krbDebug);
+            final LoginContext loginContext = new LoginContext(KEY_TAB_CONF_NAME, subject, null, conf);
+            loginContext.login();
+            return loginContext;
+        });
+    }
+    /**
+     * Usually we would have a JAAS configuration file for the login configuration.
+     * Since everything except the debug flag is static, we construct the
+     * configuration in memory instead; this avoids additional configuration from
+     * the user.
+     * <p>
+     * Because we use this instead of a jaas.conf file, it requires a refresh of
+     * {@link Configuration} and the appropriate security permissions to do so.
+     */
+    static class KeytabJaasConf extends Configuration {
+        private final String keytabFilePath;
+        private final boolean krbDebug;
+
+        KeytabJaasConf(final String keytabFilePath, final boolean krbDebug) {
+            this.keytabFilePath = keytabFilePath;
+            this.krbDebug = krbDebug;
+        }
+
+        @Override
+        public AppConfigurationEntry[] getAppConfigurationEntry(final String name) {
+            final Map<String, String> options = new HashMap<>();
+            options.put("keyTab", keytabFilePath);
+            /*
+             * As the acceptor, we can have multiple SPNs; we do not want to use a
+             * particular principal, so we use "*".
+             */
+            options.put("principal", "*");
+            options.put("useKeyTab", Boolean.TRUE.toString());
+            options.put("storeKey", Boolean.TRUE.toString());
+            options.put("doNotPrompt", Boolean.TRUE.toString());
+            options.put("isInitiator", Boolean.FALSE.toString());
+            options.put("debug", Boolean.toString(krbDebug));
+
+            return new AppConfigurationEntry[] { new AppConfigurationEntry(SUN_KRB5_LOGIN_MODULE,
+                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, Collections.unmodifiableMap(options)) };
+        }
+
+    }
+}
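For comparison, the file-based JAAS configuration that `KeytabJaasConf` replaces would look roughly like the stanza below; the keytab path is illustrative, and the entry name matches `KEY_TAB_CONF_NAME`:

```
KeytabConf {
    com.sun.security.auth.module.Krb5LoginModule required
        keyTab="/path/to/es.keytab"
        principal="*"
        useKeyTab=true
        storeKey=true
        doNotPrompt=true
        isInitiator=false
        debug=false;
};
```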
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java
index 09de32643ed93..642bc167f7d4a 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java
@@ -178,8 +178,8 @@ public void authorize(Authentication authentication, String action, TransportReq
         // first, we'll check if the action is a cluster action. If it is, we'll only check it against the cluster permissions
         if (ClusterPrivilege.ACTION_MATCHER.test(action)) {
-            ClusterPermission cluster = permission.cluster();
-            if (cluster.check(action) || checkSameUserPermissions(action, request, authentication)) {
+            final ClusterPermission cluster = permission.cluster();
+            if (cluster.check(action, request) || checkSameUserPermissions(action, request, authentication)) {
                 putTransientIfNonExisting(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, IndicesAccessControl.ALLOW_ALL);
                 auditTrail.accessGranted(authentication, action, request, permission.names());
                 return;
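The change from `check(action)` to `check(action, request)` is what allows conditional cluster privileges to inspect the request itself, not just the action name. A toy sketch of the idea, with hypothetical names and independent of the real `ClusterPermission` implementation:

```java
import java.util.function.BiPredicate;

public class RequestAwarePermissionSketch {

    /** Stand-in for TransportRequest; the real check receives the actual request. */
    interface Request {}

    /** A cluster permission that may consult the request, not only the action. */
    static final class PermissionSketch {
        private final BiPredicate<String, Request> predicate;

        PermissionSketch(BiPredicate<String, Request> predicate) {
            this.predicate = predicate;
        }

        boolean check(String action, Request request) {
            return predicate.test(action, request);
        }
    }

    public static void main(String[] args) {
        // Grant all monitoring actions unconditionally; a real conditional
        // privilege would also look inside the request object.
        PermissionSketch monitorOnly =
                new PermissionSketch((action, request) -> action.startsWith("cluster:monitor/"));
        System.out.println(monitorOnly.check("cluster:monitor/health", null));          // true
        System.out.println(monitorOnly.check("cluster:admin/settings/update", null));   // false
    }
}
```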
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java
index 1018ceeda093c..beb2ca60fb2ae 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java
@@ -12,6 +12,7 @@
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.cache.Cache;
 import org.elasticsearch.common.cache.CacheBuilder;
+import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
@@ -28,7 +29,8 @@
 import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition;
 import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition.FieldGrantExcludeGroup;
 import org.elasticsearch.xpack.core.security.authz.permission.Role;
-import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege;
+import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege;
+import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege;
 import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege;
 import org.elasticsearch.xpack.core.security.authz.privilege.Privilege;
 import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
@@ -51,6 +53,7 @@
 import java.util.function.BiConsumer;
 import java.util.stream.Collectors;
 
+import static org.elasticsearch.common.util.set.Sets.newHashSet;
 import static org.elasticsearch.xpack.core.security.SecurityField.setting;
 import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isIndexDeleted;
 import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isMoveFromRedToNonRed;
@@ -80,6 +83,7 @@ public class CompositeRolesStore extends AbstractComponent {
     private final FileRolesStore fileRolesStore;
     private final NativeRolesStore nativeRolesStore;
     private final ReservedRolesStore reservedRolesStore;
+    private final NativePrivilegeStore privilegeStore;
     private final XPackLicenseState licenseState;
     private final Cache<Set<String>, Role> roleCache;
     private final Set<String> negativeLookupCache;
@@ -88,7 +92,7 @@ public class CompositeRolesStore extends AbstractComponent {
     private final List<BiConsumer<Set<String>, ActionListener<Set<RoleDescriptor>>>> customRolesProviders;
 
     public CompositeRolesStore(Settings settings, FileRolesStore fileRolesStore, NativeRolesStore nativeRolesStore,
-                               ReservedRolesStore reservedRolesStore,
+                               ReservedRolesStore reservedRolesStore, NativePrivilegeStore privilegeStore,
                                List<BiConsumer<Set<String>, ActionListener<Set<RoleDescriptor>>>> rolesProviders,
                                ThreadContext threadContext, XPackLicenseState licenseState) {
         super(settings);
@@ -98,6 +102,7 @@ public CompositeRolesStore(Settings settings, FileRolesStore fileRolesStore, Nat
         fileRolesStore.addListener(this::invalidateAll);
         this.nativeRolesStore = nativeRolesStore;
         this.reservedRolesStore = reservedRolesStore;
+        this.privilegeStore = privilegeStore;
         this.licenseState = licenseState;
         CacheBuilder<Set<String>, Role> builder = CacheBuilder.builder();
         final int cacheSize = CACHE_SIZE_SETTING.get(settings);
@@ -117,31 +122,33 @@ public void roles(Set<String> roleNames, FieldPermissionsCache fieldPermissionsC
         } else {
             final long invalidationCounter = numInvalidation.get();
             roleDescriptors(roleNames, ActionListener.wrap(
-                    (descriptors) -> {
-                        final Role role;
+                    descriptors -> {
+                        final Set<RoleDescriptor> effectiveDescriptors;
                         if (licenseState.isDocumentAndFieldLevelSecurityAllowed()) {
-                            role = buildRoleFromDescriptors(descriptors, fieldPermissionsCache);
+                            effectiveDescriptors = descriptors;
                         } else {
-                            final Set<RoleDescriptor> filtered = descriptors.stream()
+                            effectiveDescriptors = descriptors.stream()
                                     .filter((rd) -> rd.isUsingDocumentOrFieldLevelSecurity() == false)
                                     .collect(Collectors.toSet());
-                            role = buildRoleFromDescriptors(filtered, fieldPermissionsCache);
                         }
-
-                        if (role != null) {
-                            try (ReleasableLock ignored = readLock.acquire()) {
-                                /* this is kinda spooky. We use a read/write lock to ensure we don't modify the cache if we hold the write
-                                 * lock (fetching stats for instance - which is kinda overkill?) but since we fetching stuff in an async
-                                 * fashion we need to make sure that if the cache got invalidated since we started the request we don't
-                                 * put a potential stale result in the cache, hence the numInvalidation.get() comparison to the number of
-                                 * invalidation when we started. we just try to be on the safe side and don't cache potentially stale
-                                 * results*/
-                                if (invalidationCounter == numInvalidation.get()) {
-                                    roleCache.computeIfAbsent(roleNames, (s) -> role);
+                        logger.trace("Building role from descriptors [{}] for names [{}]", effectiveDescriptors, roleNames);
+                        buildRoleFromDescriptors(effectiveDescriptors, fieldPermissionsCache, privilegeStore, ActionListener.wrap(role -> {
+                            if (role != null) {
+                                try (ReleasableLock ignored = readLock.acquire()) {
+                                    /* this is kinda spooky. We use a read/write lock to ensure we don't modify the cache if we hold
+                                     * the write lock (fetching stats for instance - which is kinda overkill?) but since we fetching
+                                     * stuff in an async fashion we need to make sure that if the cache got invalidated since we
+                                     * started the request we don't put a potential stale result in the cache, hence the
+                                     * numInvalidation.get() comparison to the number of invalidation when we started. we just try to
+                                     * be on the safe side and don't cache potentially stale results
+                                     */
+                                    if (invalidationCounter == numInvalidation.get()) {
+                                        roleCache.computeIfAbsent(roleNames, (s) -> role);
+                                    }
                                 }
                             }
-                        }
-                        roleActionListener.onResponse(role);
+                            roleActionListener.onResponse(role);
+                        }, roleActionListener::onFailure));
                     }, roleActionListener::onFailure));
         }
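The `roles()` change keeps the pre-existing guard against caching stale results, now wrapped around an asynchronous callback: capture the invalidation counter before starting, and only cache if no invalidation happened in between. The idiom in isolation, with hypothetical names:

```java
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;

public class InvalidationCounterSketch {

    private final ConcurrentHashMap<String, String> cache = new ConcurrentHashMap<>();
    private final AtomicLong numInvalidation = new AtomicLong();

    void invalidateAll() {
        numInvalidation.incrementAndGet();
        cache.clear();
    }

    /** Compute asynchronously; cache only if no invalidation happened meanwhile. */
    void load(String key, Consumer<String> callback) {
        final long invalidationCounter = numInvalidation.get();
        computeAsync(key, value -> {
            if (invalidationCounter == numInvalidation.get()) {
                cache.putIfAbsent(key, value);
            }
            callback.accept(value); // always answer the caller, cached or not
        });
    }

    private void computeAsync(String key, Consumer<String> onResult) {
        // placeholder for the real asynchronous role build
        onResult.accept("value-for-" + key);
    }
}
```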
@@ -238,25 +245,36 @@ private Set<String> difference(Set<String> roleNames, Set<RoleDescriptor> descri
         return Sets.difference(roleNames, foundNames);
     }
 
-    public static Role buildRoleFromDescriptors(Set<RoleDescriptor> roleDescriptors, FieldPermissionsCache fieldPermissionsCache) {
+    public static void buildRoleFromDescriptors(Collection<RoleDescriptor> roleDescriptors, FieldPermissionsCache fieldPermissionsCache,
+                                                NativePrivilegeStore privilegeStore, ActionListener<Role> listener) {
         if (roleDescriptors.isEmpty()) {
-            return Role.EMPTY;
+            listener.onResponse(Role.EMPTY);
+            return;
         }
+
         Set<String> clusterPrivileges = new HashSet<>();
+        final List<ConditionalClusterPrivilege> conditionalClusterPrivileges = new ArrayList<>();
         Set<String> runAs = new HashSet<>();
         Map<Set<String>, MergeableIndicesPrivilege> indicesPrivilegesMap = new HashMap<>();
+
+        // Keyed by application + resource
+        Map<Tuple<String, Set<String>>, Set<String>> applicationPrivilegesMap = new HashMap<>();
+
         List<String> roleNames = new ArrayList<>(roleDescriptors.size());
         for (RoleDescriptor descriptor : roleDescriptors) {
             roleNames.add(descriptor.getName());
             if (descriptor.getClusterPrivileges() != null) {
                 clusterPrivileges.addAll(Arrays.asList(descriptor.getClusterPrivileges()));
             }
+            if (descriptor.getConditionalClusterPrivileges() != null) {
+                conditionalClusterPrivileges.addAll(Arrays.asList(descriptor.getConditionalClusterPrivileges()));
+            }
             if (descriptor.getRunAs() != null) {
                 runAs.addAll(Arrays.asList(descriptor.getRunAs()));
             }
             IndicesPrivileges[] indicesPrivileges = descriptor.getIndicesPrivileges();
             for (IndicesPrivileges indicesPrivilege : indicesPrivileges) {
-                Set<String> key = Sets.newHashSet(indicesPrivilege.getIndices());
+                Set<String> key = newHashSet(indicesPrivilege.getIndices());
                 // if a index privilege is an explicit denial, then we treat it as non-existent since we skipped these in the past when
                 // merging
                 final boolean isExplicitDenial =
@@ -274,19 +292,44 @@ public static Role buildRoleFromDescriptors(Set<RoleDescriptor> roleDescriptors,
                 });
             }
+            for (RoleDescriptor.ApplicationResourcePrivileges appPrivilege : descriptor.getApplicationPrivileges()) {
+                Tuple<String, Set<String>> key = new Tuple<>(appPrivilege.getApplication(), newHashSet(appPrivilege.getResources()));
+                applicationPrivilegesMap.compute(key, (k, v) -> {
+                    if (v == null) {
+                        return newHashSet(appPrivilege.getPrivileges());
+                    } else {
+                        v.addAll(Arrays.asList(appPrivilege.getPrivileges()));
+                        return v;
+                    }
+                });
+            }
         }
 
-        final Set<String> clusterPrivs = clusterPrivileges.isEmpty() ? null : clusterPrivileges;
         final Privilege runAsPrivilege = runAs.isEmpty() ? Privilege.NONE : new Privilege(runAs, runAs.toArray(Strings.EMPTY_ARRAY));
-        Role.Builder builder = Role.builder(roleNames.toArray(new String[roleNames.size()]))
-                .cluster(ClusterPrivilege.get(clusterPrivs))
+        final Role.Builder builder = Role.builder(roleNames.toArray(new String[roleNames.size()]))
+                .cluster(clusterPrivileges, conditionalClusterPrivileges)
                 .runAs(runAsPrivilege);
         indicesPrivilegesMap.entrySet().forEach((entry) -> {
             MergeableIndicesPrivilege privilege = entry.getValue();
             builder.add(fieldPermissionsCache.getFieldPermissions(privilege.fieldPermissionsDefinition), privilege.query,
                     IndexPrivilege.get(privilege.privileges), privilege.indices.toArray(Strings.EMPTY_ARRAY));
         });
-        return builder.build();
+
+        if (applicationPrivilegesMap.isEmpty()) {
+            listener.onResponse(builder.build());
+        } else {
+            final Set<String> applicationNames = applicationPrivilegesMap.keySet().stream()
+                    .map(Tuple::v1)
+                    .collect(Collectors.toSet());
+            final Set<String> applicationPrivilegeNames = applicationPrivilegesMap.values().stream()
+                    .flatMap(Collection::stream)
+                    .collect(Collectors.toSet());
+            privilegeStore.getPrivileges(applicationNames, applicationPrivilegeNames, ActionListener.wrap(appPrivileges -> {
+                applicationPrivilegesMap.forEach((key, names) ->
+                        builder.addApplicationPrivilege(ApplicationPrivilege.get(key.v1(), names, appPrivileges), key.v2()));
+                listener.onResponse(builder.build());
+            }, listener::onFailure));
+        }
     }
 
     public void invalidateAll() {
@@ -340,11 +383,11 @@ private static class MergeableIndicesPrivilege {
         MergeableIndicesPrivilege(String[] indices, String[] privileges, @Nullable String[] grantedFields, @Nullable String[] deniedFields,
                                   @Nullable BytesReference query) {
-            this.indices = Sets.newHashSet(Objects.requireNonNull(indices));
-            this.privileges = Sets.newHashSet(Objects.requireNonNull(privileges));
+            this.indices = newHashSet(Objects.requireNonNull(indices));
+            this.privileges = newHashSet(Objects.requireNonNull(privileges));
             this.fieldPermissionsDefinition = new FieldPermissionsDefinition(grantedFields, deniedFields);
             if (query != null) {
-                this.query = Sets.newHashSet(query);
+                this.query = newHashSet(query);
             }
         }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java
new file mode 100644
index 0000000000000..807cfff6c2c19
--- /dev/null
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java
@@ -0,0 +1,278 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.xpack.security.authz.store; + +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.ContextPreservingActionListener; +import org.elasticsearch.action.support.GroupedActionListener; +import org.elasticsearch.action.support.TransportActions; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.util.iterable.Iterables; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParseException; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.security.ScrollHelper; +import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheRequest; +import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheResponse; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; +import org.elasticsearch.xpack.core.security.client.SecurityClient; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.function.Supplier; +import java.util.stream.Collector; +import java.util.stream.Collectors; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; +import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; +import static org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor.DOC_TYPE_VALUE; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; + +/** + * {@code NativePrivilegeStore} is a store that reads/writes {@link ApplicationPrivilegeDescriptor} objects, + * from an Elasticsearch index. 
+ */ +public class NativePrivilegeStore extends AbstractComponent { + + private static final Collector, ?, Map>> TUPLES_TO_MAP = Collectors.toMap( + Tuple::v1, + t -> CollectionUtils.newSingletonArrayList(t.v2()), (a, b) -> { + a.addAll(b); + return a; + }); + + private final Client client; + private final SecurityClient securityClient; + private final SecurityIndexManager securityIndexManager; + + public NativePrivilegeStore(Settings settings, Client client, SecurityIndexManager securityIndexManager) { + super(settings); + this.client = client; + this.securityClient = new SecurityClient(client); + this.securityIndexManager = securityIndexManager; + } + + public void getPrivileges(Collection applications, Collection names, + ActionListener> listener) { + if (applications != null && applications.size() == 1 && names != null && names.size() == 1) { + getPrivilege(Objects.requireNonNull(Iterables.get(applications, 0)), Objects.requireNonNull(Iterables.get(names, 0)), + ActionListener.wrap(privilege -> + listener.onResponse(privilege == null ? Collections.emptyList() : Collections.singletonList(privilege)), + listener::onFailure)); + } else { + securityIndexManager.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + final QueryBuilder query; + final TermQueryBuilder typeQuery = QueryBuilders + .termQuery(ApplicationPrivilegeDescriptor.Fields.TYPE.getPreferredName(), DOC_TYPE_VALUE); + if (isEmpty(applications) && isEmpty(names)) { + query = typeQuery; + } else if (isEmpty(names)) { + query = QueryBuilders.boolQuery().filter(typeQuery).filter( + QueryBuilders.termsQuery(ApplicationPrivilegeDescriptor.Fields.APPLICATION.getPreferredName(), applications)); + } else if (isEmpty(applications)) { + query = QueryBuilders.boolQuery().filter(typeQuery) + .filter(QueryBuilders.termsQuery(ApplicationPrivilegeDescriptor.Fields.NAME.getPreferredName(), names)); + } else { + final String[] docIds = applications.stream() + .flatMap(a -> names.stream().map(n -> toDocId(a, n))) + .toArray(String[]::new); + query = QueryBuilders.boolQuery().filter(typeQuery).filter(QueryBuilders.idsQuery("doc").addIds(docIds)); + } + final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { + SearchRequest request = client.prepareSearch(SECURITY_INDEX_NAME) + .setScroll(TimeValue.timeValueSeconds(10L)) + .setQuery(query) + .setSize(1000) + .setFetchSource(true) + .request(); + logger.trace(() -> + new ParameterizedMessage("Searching for privileges [{}] with query [{}]", names, Strings.toString(query))); + request.indicesOptions().ignoreUnavailable(); + ScrollHelper.fetchAllByEntity(client, request, new ContextPreservingActionListener<>(supplier, listener), + hit -> buildPrivilege(hit.getId(), hit.getSourceRef())); + } + }); + } + } + + private static boolean isEmpty(Collection collection) { + return collection == null || collection.isEmpty(); + } + + public void getPrivilege(String application, String name, ActionListener listener) { + securityIndexManager.prepareIndexIfNeededThenExecute(listener::onFailure, + () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareGet(SECURITY_INDEX_NAME, "doc", toDocId(application, name)).request(), + new ActionListener() { + @Override + public void onResponse(GetResponse response) { + if (response.isExists()) { + listener.onResponse(buildPrivilege(response.getId(), 
response.getSourceAsBytesRef())); + } else { + listener.onResponse(null); + } + } + + @Override + public void onFailure(Exception e) { + // if the index or the shard is not there / available we just claim the privilege is not there + if (TransportActions.isShardNotAvailableException(e)) { + logger.warn(new ParameterizedMessage("failed to load privilege [{}] index not available", name), e); + listener.onResponse(null); + } else { + logger.error(new ParameterizedMessage("failed to load privilege [{}]", name), e); + listener.onFailure(e); + } + } + }, + client::get)); + } + + public void putPrivileges(Collection privileges, WriteRequest.RefreshPolicy refreshPolicy, + ActionListener>> listener) { + securityIndexManager.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + ActionListener groupListener = new GroupedActionListener<>( + ActionListener.wrap((Collection responses) -> { + final Map> createdNames = responses.stream() + .filter(r -> r.getResult() == DocWriteResponse.Result.CREATED) + .map(r -> r.getId()) + .map(NativePrivilegeStore::nameFromDocId) + .collect(TUPLES_TO_MAP); + clearRolesCache(listener, createdNames); + }, listener::onFailure), privileges.size(), Collections.emptyList()); + for (ApplicationPrivilegeDescriptor privilege : privileges) { + innerPutPrivilege(privilege, refreshPolicy, groupListener); + } + }); + } + + private void innerPutPrivilege(ApplicationPrivilegeDescriptor privilege, WriteRequest.RefreshPolicy refreshPolicy, + ActionListener listener) { + try { + final String name = privilege.getName(); + final XContentBuilder xContentBuilder = privilege.toXContent(jsonBuilder(), true); + ClientHelper.executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareIndex(SECURITY_INDEX_NAME, "doc", toDocId(privilege.getApplication(), name)) + .setSource(xContentBuilder) + .setRefreshPolicy(refreshPolicy) + .request(), listener, client::index); + } catch (Exception e) { + logger.warn("Failed to put privilege {} - {}", Strings.toString(privilege), e.toString()); + listener.onFailure(e); + } + + } + + public void deletePrivileges(String application, Collection names, WriteRequest.RefreshPolicy refreshPolicy, + ActionListener>> listener) { + securityIndexManager.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + ActionListener groupListener = new GroupedActionListener<>( + ActionListener.wrap(responses -> { + final Map> deletedNames = responses.stream() + .filter(r -> r.getResult() == DocWriteResponse.Result.DELETED) + .map(r -> r.getId()) + .map(NativePrivilegeStore::nameFromDocId) + .collect(TUPLES_TO_MAP); + clearRolesCache(listener, deletedNames); + }, listener::onFailure), names.size(), Collections.emptyList()); + for (String name : names) { + ClientHelper.executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareDelete(SECURITY_INDEX_NAME, "doc", toDocId(application, name)) + .setRefreshPolicy(refreshPolicy) + .request(), groupListener, client::delete); + } + }); + } + + private void clearRolesCache(ActionListener listener, T value) { + // This currently clears _all_ roles, but could be improved to clear only those roles that reference the affected application + ClearRolesCacheRequest request = new ClearRolesCacheRequest(); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, + new ActionListener() { + @Override + public void onResponse(ClearRolesCacheResponse nodes) { + listener.onResponse(value); + } + + @Override + public void 
onFailure(Exception e) { + logger.error("unable to clear role cache", e); + listener.onFailure( + new ElasticsearchException("clearing the role cache failed. please clear the role cache manually", e)); + } + }, securityClient::clearRolesCache); + } + + private ApplicationPrivilegeDescriptor buildPrivilege(String docId, BytesReference source) { + logger.trace("Building privilege from [{}] [{}]", docId, source == null ? "<>" : source.utf8ToString()); + if (source == null) { + return null; + } + final Tuple name = nameFromDocId(docId); + try { + // EMPTY is safe here because we never use namedObject + + try (StreamInput input = source.streamInput(); + XContentParser parser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, input)) { + final ApplicationPrivilegeDescriptor privilege = ApplicationPrivilegeDescriptor.parse(parser, null, null, true); + assert privilege.getApplication().equals(name.v1()) + : "Incorrect application name for privilege. Expected [" + name.v1() + "] but was " + privilege.getApplication(); + assert privilege.getName().equals(name.v2()) + : "Incorrect name for application privilege. Expected [" + name.v2() + "] but was " + privilege.getName(); + return privilege; + } + } catch (IOException | XContentParseException e) { + logger.error(new ParameterizedMessage("cannot parse application privilege [{}]", name), e); + return null; + } + } + + private static Tuple nameFromDocId(String docId) { + final String name = docId.substring(DOC_TYPE_VALUE.length() + 1); + assert name != null && name.length() > 0 : "Invalid name '" + name + "'"; + final int colon = name.indexOf(':'); + assert colon > 0 : "Invalid name '" + name + "' (missing colon)"; + return new Tuple<>(name.substring(0, colon), name.substring(colon + 1)); + } + + private static String toDocId(String application, String name) { + return DOC_TYPE_VALUE + "_" + application + ":" + name; + } + +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 9093b6a66739e..e578a4005c4ee 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.MultiSearchResponse.Item; @@ -63,6 +64,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.core.security.SecurityField.setting; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ROLE_TYPE; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; /** * NativeRolesStore is a {@code RolesStore} that, instead of reading from a @@ -173,15 +175,17 @@ void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final listener.onFailure(e); return; } + final IndexRequest indexRequest = client.prepareIndex(SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(role.getName())) + 
.setSource(xContentBuilder) + .setRefreshPolicy(request.getRefreshPolicy()) + .request(); executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(role.getName())) - .setSource(xContentBuilder) - .setRefreshPolicy(request.getRefreshPolicy()) - .request(), + indexRequest, new ActionListener() { @Override public void onResponse(IndexResponse indexResponse) { final boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED; + logger.trace("Created role: [{}]", indexRequest); clearRoleCache(role.getName(), listener, created); } @@ -234,7 +238,6 @@ public void onResponse(MultiSearchResponse items) { } else { usageStats.put("size", responses[0].getResponse().getHits().getTotalHits()); } - if (responses[1].isFailure()) { usageStats.put("fls", false); } else { @@ -289,7 +292,7 @@ public void onFailure(Exception e) { private void executeGetRoleRequest(String role, ActionListener listener) { securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, + client.prepareGet(SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(role)).request(), listener, client::get)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java index 0b2642ae5bec4..9006ec620b543 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java @@ -56,7 +56,7 @@ protected final RestChannelConsumer prepareRequest(RestRequest request, NodeClie /** * Check whether the given request is allowed within the current license state and setup, * and return the name of any unlicensed feature. - * By default this returns an exception is security is not available by the current license or + * By default this returns an exception if security is not available by the current license or * security is not enabled. * Sub-classes can override this method if they have additional requirements. * diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestDeletePrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestDeletePrivilegesAction.java new file mode 100644 index 0000000000000..d0cee0dd6b902 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestDeletePrivilegesAction.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.rest.action.privilege; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesResponse; +import org.elasticsearch.xpack.core.security.client.SecurityClient; +import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; + +import static org.elasticsearch.rest.RestRequest.Method.DELETE; + +/** + * Rest action to delete one or more privileges from the security index + */ +public class RestDeletePrivilegesAction extends SecurityBaseRestHandler { + + public RestDeletePrivilegesAction(Settings settings, RestController controller, XPackLicenseState licenseState) { + super(settings, licenseState); + controller.registerHandler(DELETE, "/_xpack/security/privilege/{application}/{privilege}", this); + } + + @Override + public String getName() { + return "xpack_security_delete_privilege_action"; + } + + @Override + public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { + final String application = request.param("application"); + final String[] privileges = request.paramAsStringArray("privilege", null); + final String refresh = request.param("refresh"); + return channel -> new SecurityClient(client).prepareDeletePrivileges(application, privileges) + .setRefreshPolicy(refresh) + .execute(new RestBuilderListener(channel) { + @Override + public RestResponse buildResponse(DeletePrivilegesResponse response, XContentBuilder builder) throws Exception { + builder.startObject(); + builder.startObject(application); + for (String privilege : new HashSet<>(Arrays.asList(privileges))) { + builder.field(privilege, Collections.singletonMap("found", response.found().contains(privilege))); + } + builder.endObject(); + builder.endObject(); + return new BytesRestResponse(response.found().isEmpty() ? RestStatus.NOT_FOUND : RestStatus.OK, builder); + } + }); + } + +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestGetPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestGetPrivilegesAction.java new file mode 100644 index 0000000000000..8e3c3bbb87e6e --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestGetPrivilegesAction.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.rest.action.privilege; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesResponse; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; +import org.elasticsearch.xpack.core.security.client.SecurityClient; +import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.rest.RestRequest.Method.GET; + +/** + * Rest action to retrieve an application privilege from the security index + */ +public class RestGetPrivilegesAction extends SecurityBaseRestHandler { + + public RestGetPrivilegesAction(Settings settings, RestController controller, XPackLicenseState licenseState) { + super(settings, licenseState); + controller.registerHandler(GET, "/_xpack/security/privilege/", this); + controller.registerHandler(GET, "/_xpack/security/privilege/{application}", this); + controller.registerHandler(GET, "/_xpack/security/privilege/{application}/{privilege}", this); + } + + @Override + public String getName() { + return "xpack_security_get_privileges_action"; + } + + @Override + public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { + final String application = request.param("application"); + final String[] privileges = request.paramAsStringArray("privilege", Strings.EMPTY_ARRAY); + + return channel -> new SecurityClient(client).prepareGetPrivileges(application, privileges) + .execute(new RestBuilderListener(channel) { + @Override + public RestResponse buildResponse(GetPrivilegesResponse response, XContentBuilder builder) throws Exception { + final Map> privsByApp = groupByApplicationName(response.privileges()); + builder.startObject(); + for (String app : privsByApp.keySet()) { + builder.startObject(app); + for (ApplicationPrivilegeDescriptor privilege : privsByApp.get(app)) { + builder.field(privilege.getName(), privilege); + } + builder.endObject(); + } + builder.endObject(); + + // if the user asked for specific privileges, but none of them were found + // we'll return an empty result and 404 status code + if (privileges.length != 0 && response.privileges().length == 0) { + return new BytesRestResponse(RestStatus.NOT_FOUND, builder); + } + + // either the user asked for all privileges, or at least one of the privileges + // was found + return new BytesRestResponse(RestStatus.OK, builder); + } + }); + } + + static Map> groupByApplicationName(ApplicationPrivilegeDescriptor[] privileges) { + return Arrays.stream(privileges).collect(Collectors.toMap( + ApplicationPrivilegeDescriptor::getApplication, + Collections::singleton, + Sets::union + )); + } +} diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegeAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegeAction.java new file mode 100644 index 0000000000000..6c3ef8e70fabf --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegeAction.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.rest.action.privilege; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesRequestBuilder; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; +import org.elasticsearch.xpack.core.security.client.SecurityClient; +import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; + +import java.io.IOException; + +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +/** + * Rest endpoint to add one or more {@link ApplicationPrivilege} objects to the security index + */ +public class RestPutPrivilegeAction extends SecurityBaseRestHandler { + + public RestPutPrivilegeAction(Settings settings, RestController controller, XPackLicenseState licenseState) { + super(settings, licenseState); + controller.registerHandler(PUT, "/_xpack/security/privilege/{application}/{privilege}", this); + controller.registerHandler(POST, "/_xpack/security/privilege/{application}/{privilege}", this); + } + + @Override + public String getName() { + return "xpack_security_put_privilege_action"; + } + + @Override + public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { + final String application = request.param("application"); + final String privilege = request.param("privilege"); + PutPrivilegesRequestBuilder requestBuilder = new SecurityClient(client) + .preparePutPrivilege(application, privilege, request.requiredContent(), request.getXContentType()) + .setRefreshPolicy(request.param("refresh")); + + return RestPutPrivilegesAction.execute(requestBuilder); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java new file mode 100644 index 0000000000000..eb1104c9bc036 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.rest.action.privilege; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesRequestBuilder; +import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesResponse; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; +import org.elasticsearch.xpack.core.security.client.SecurityClient; +import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +/** + * Rest endpoint to add one or more {@link ApplicationPrivilege} objects to the security index + */ +public class RestPutPrivilegesAction extends SecurityBaseRestHandler { + + public RestPutPrivilegesAction(Settings settings, RestController controller, XPackLicenseState licenseState) { + super(settings, licenseState); + controller.registerHandler(POST, "/_xpack/security/privilege/", this); + } + + @Override + public String getName() { + return "xpack_security_put_privileges_action"; + } + + @Override + public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { + PutPrivilegesRequestBuilder requestBuilder = new SecurityClient(client) + .preparePutPrivileges(request.requiredContent(), request.getXContentType()) + .setRefreshPolicy(request.param("refresh")); + + return execute(requestBuilder); + } + + static RestChannelConsumer execute(PutPrivilegesRequestBuilder requestBuilder) { + return channel -> requestBuilder.execute(new RestBuilderListener(channel) { + @Override + public RestResponse buildResponse(PutPrivilegesResponse response, XContentBuilder builder) throws Exception { + final List privileges = requestBuilder.request().getPrivileges(); + Map>> result = new HashMap<>(); + privileges.stream() + .map(ApplicationPrivilegeDescriptor::getApplication) + .distinct() + .forEach(a -> result.put(a, new HashMap<>())); + privileges.forEach(privilege -> { + String name = privilege.getName(); + boolean created = response.created().getOrDefault(privilege.getApplication(), Collections.emptyList()).contains(name); + result.get(privilege.getApplication()).put(name, Collections.singletonMap("created", created)); + }); + builder.map(result); + return new BytesRestResponse(RestStatus.OK, builder); + } + }); + } + +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java index cc566c212cfb8..4949b18366df9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java +++ 
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java
index cc566c212cfb8..4949b18366df9 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesAction.java
@@ -6,8 +6,11 @@
 package org.elasticsearch.xpack.security.rest.action.user;
 
 import org.elasticsearch.client.node.NodeClient;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.rest.BytesRestResponse;
 import org.elasticsearch.rest.RestChannel;
@@ -24,6 +27,8 @@
 import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler;
 
 import java.io.IOException;
+import java.util.List;
+import java.util.Map;
 
 import static org.elasticsearch.rest.RestRequest.Method.GET;
 import static org.elasticsearch.rest.RestRequest.Method.POST;
@@ -54,8 +59,8 @@ public String getName() {
     @Override
     public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException {
         final String username = getUsername(request);
-        HasPrivilegesRequestBuilder requestBuilder = new SecurityClient(client)
-            .prepareHasPrivileges(username, request.requiredContent(), request.getXContentType());
+        final Tuple<XContentType, BytesReference> content = request.contentOrSourceParam();
+        HasPrivilegesRequestBuilder requestBuilder = new SecurityClient(client).prepareHasPrivileges(username, content.v2(), content.v1());
         return channel -> requestBuilder.execute(new HasPrivilegesRestResponseBuilder(username, channel));
     }
 
@@ -84,10 +89,12 @@ public RestResponse buildResponse(HasPrivilegesResponse response, XContentBuilde
             builder.field("cluster");
             builder.map(response.getClusterPrivileges());
 
-            builder.startObject("index");
-            for (HasPrivilegesResponse.IndexPrivileges index : response.getIndexPrivileges()) {
-                builder.field(index.getIndex());
-                builder.map(index.getPrivileges());
+            appendResources(builder, "index", response.getIndexPrivileges());
+
+            builder.startObject("application");
+            final Map<String, List<HasPrivilegesResponse.ResourcePrivileges>> appPrivileges = response.getApplicationPrivileges();
+            for (String app : appPrivileges.keySet()) {
+                appendResources(builder, app, appPrivileges.get(app));
             }
             builder.endObject();
 
@@ -95,5 +102,15 @@ public RestResponse buildResponse(HasPrivilegesResponse response, XContentBuilde
             return new BytesRestResponse(RestStatus.OK, builder);
         }
 
+        private void appendResources(XContentBuilder builder, String field, List<HasPrivilegesResponse.ResourcePrivileges> privileges)
+            throws IOException {
+            builder.startObject(field);
+            for (HasPrivilegesResponse.ResourcePrivileges privilege : privileges) {
+                builder.field(privilege.getResource());
+                builder.map(privilege.getPrivileges());
+            }
+            builder.endObject();
+        }
+
     }
 }
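Note: appendResources(...) gives the index and application sections the same per-resource shape. A self-contained sketch of the equivalent XContent calls for a single ResourcePrivileges entry (resource "academy", privilege "read" granted); XContentFactory.jsonBuilder() is assumed as the builder source:

    XContentBuilder builder = XContentFactory.jsonBuilder();
    builder.startObject();
    builder.startObject("index");
    builder.field("academy");
    builder.map(Collections.singletonMap("read", true));
    builder.endObject();
    builder.endObject();
    // yields {"index":{"academy":{"read":true}}}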
\"*\"", "read"; + permission javax.security.auth.PrivateCredentialPermission "javax.security.auth.kerberos.KeyTab * \"*\"", "read"; + permission javax.security.auth.PrivateCredentialPermission "javax.security.auth.kerberos.KerberosTicket * \"*\"", "read"; + permission javax.security.auth.AuthPermission "doAs"; + permission javax.security.auth.kerberos.ServicePermission "*","initiate,accept"; + + permission java.util.PropertyPermission "javax.security.auth.useSubjectCredsOnly","write"; + permission java.util.PropertyPermission "java.security.krb5.conf","write"; + permission java.util.PropertyPermission "sun.security.krb5.debug","write"; + permission java.util.PropertyPermission "java.security.debug","write"; + permission java.util.PropertyPermission "sun.security.spnego.debug","write"; }; grant codeBase "${codebase.xmlsec-2.0.8.jar}" { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaSystemRoleIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaSystemRoleIntegTests.java new file mode 100644 index 0000000000000..65fa6027c627b --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaSystemRoleIntegTests.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.integration; + +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.test.SecurityIntegTestCase; +import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; + +import java.util.Locale; + +import static java.util.Collections.singletonMap; +import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; +import static org.hamcrest.Matchers.is; + +public class KibanaSystemRoleIntegTests extends SecurityIntegTestCase { + + protected static final SecureString USERS_PASSWD = new SecureString("change_me".toCharArray()); + + @Override + public String configUsers() { + final String usersPasswdHashed = new String(getFastStoredHashAlgoForTests().hash(USERS_PASSWD)); + return super.configUsers() + + "kibana_system:" + usersPasswdHashed; + } + + @Override + public String configUsersRoles() { + return super.configUsersRoles() + + "kibana_system:kibana_system"; + } + + + public void testCreateIndexDeleteInKibanaIndex() throws Exception { + final String index = randomBoolean()? 
".kibana" : ".kibana-" + randomAlphaOfLengthBetween(1, 10).toLowerCase(Locale.ENGLISH); + + if (randomBoolean()) { + CreateIndexResponse createIndexResponse = client().filterWithHeader(singletonMap("Authorization", + UsernamePasswordToken.basicAuthHeaderValue("kibana_system", USERS_PASSWD))) + .admin().indices().prepareCreate(index).get(); + assertThat(createIndexResponse.isAcknowledged(), is(true)); + } + + IndexResponse response = client() + .filterWithHeader(singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_system", USERS_PASSWD))) + .prepareIndex() + .setIndex(index) + .setType("dashboard") + .setSource("foo", "bar") + .setRefreshPolicy(IMMEDIATE) + .get(); + assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); + + DeleteResponse deleteResponse = client() + .filterWithHeader(singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_system", USERS_PASSWD))) + .prepareDelete(index, "dashboard", response.getId()) + .get(); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java index cc080a846fae3..b5b939e174410 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.test.SecurityIntegTestCase; +import org.elasticsearch.test.NativeRealmIntegTestCase; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import java.util.Locale; @@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -public class KibanaUserRoleIntegTests extends SecurityIntegTestCase { +public class KibanaUserRoleIntegTests extends NativeRealmIntegTestCase { protected static final SecureString USERS_PASSWD = new SecureString("change_me".toCharArray()); @@ -154,25 +154,25 @@ public void testCreateIndexDeleteInKibanaIndex() throws Exception { if (randomBoolean()) { CreateIndexResponse createIndexResponse = client().filterWithHeader(singletonMap("Authorization", - UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD))) - .admin().indices().prepareCreate(index).get(); + UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD))) + .admin().indices().prepareCreate(index).get(); assertThat(createIndexResponse.isAcknowledged(), is(true)); } IndexResponse response = client() - .filterWithHeader(singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD))) - .prepareIndex() - .setIndex(index) - .setType("dashboard") - .setSource("foo", "bar") - .setRefreshPolicy(IMMEDIATE) - .get(); + .filterWithHeader(singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD))) + .prepareIndex() + .setIndex(index) + .setType("dashboard") + .setSource("foo", "bar") + .setRefreshPolicy(IMMEDIATE) + .get(); assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); DeleteResponse deleteResponse = client() - .filterWithHeader(singletonMap("Authorization", 
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java
index cc080a846fae3..b5b939e174410 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java
@@ -20,7 +20,7 @@
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.test.SecurityIntegTestCase;
+import org.elasticsearch.test.NativeRealmIntegTestCase;
 import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
 
 import java.util.Locale;
@@ -36,7 +36,7 @@
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.notNullValue;
 
-public class KibanaUserRoleIntegTests extends SecurityIntegTestCase {
+public class KibanaUserRoleIntegTests extends NativeRealmIntegTestCase {
 
     protected static final SecureString USERS_PASSWD = new SecureString("change_me".toCharArray());
 
@@ -154,25 +154,25 @@ public void testCreateIndexDeleteInKibanaIndex() throws Exception {
 
         if (randomBoolean()) {
             CreateIndexResponse createIndexResponse = client().filterWithHeader(singletonMap("Authorization",
-                UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)))
-                .admin().indices().prepareCreate(index).get();
+                    UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)))
+                    .admin().indices().prepareCreate(index).get();
             assertThat(createIndexResponse.isAcknowledged(), is(true));
         }
 
         IndexResponse response = client()
-            .filterWithHeader(singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)))
-            .prepareIndex()
-            .setIndex(index)
-            .setType("dashboard")
-            .setSource("foo", "bar")
-            .setRefreshPolicy(IMMEDIATE)
-            .get();
+                .filterWithHeader(singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)))
+                .prepareIndex()
+                .setIndex(index)
+                .setType("dashboard")
+                .setSource("foo", "bar")
+                .setRefreshPolicy(IMMEDIATE)
+                .get();
         assertEquals(DocWriteResponse.Result.CREATED, response.getResult());
 
         DeleteResponse deleteResponse = client()
-            .filterWithHeader(singletonMap("Authorization",
-                UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)))
-            .prepareDelete(index, "dashboard", response.getId())
-            .get();
+                .filterWithHeader(singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)))
+                .prepareDelete(index, "dashboard", response.getId())
+                .get();
         assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
     }
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilderTests.java
new file mode 100644
index 0000000000000..db0548c03ef30
--- /dev/null
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/core/security/action/privilege/PutPrivilegesRequestBuilderTests.java
@@ -0,0 +1,127 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.security.action.privilege;
+
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.util.set.Sets;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor;
+
+import java.util.Collections;
+import java.util.List;
+
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.iterableWithSize;
+import static org.hamcrest.Matchers.notNullValue;
+
+public class PutPrivilegesRequestBuilderTests extends ESTestCase {
+
+    public void testBuildRequestWithMultipleElements() throws Exception {
+        final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE);
+        builder.source(new BytesArray("{ "
+            + "\"foo\":{"
+            + "  \"read\":{ \"application\":\"foo\", \"name\":\"read\", \"actions\":[ \"data:/read/*\", \"admin:/read/*\" ] },"
+            + "  \"write\":{ \"application\":\"foo\", \"name\":\"write\", \"actions\":[ \"data:/write/*\", \"admin:*\" ] },"
+            + "  \"all\":{ \"application\":\"foo\", \"name\":\"all\", \"actions\":[ \"*\" ] }"
+            + " }, "
+            + "\"bar\":{"
+            + "  \"read\":{ \"application\":\"bar\", \"name\":\"read\", \"actions\":[ \"read/*\" ] },"
+            + "  \"write\":{ \"application\":\"bar\", \"name\":\"write\", \"actions\":[ \"write/*\" ] },"
+            + "  \"all\":{ \"application\":\"bar\", \"name\":\"all\", \"actions\":[ \"*\" ] }"
+            + " } "
+            + "}"), XContentType.JSON);
+        final List<ApplicationPrivilegeDescriptor> privileges = builder.request().getPrivileges();
+        assertThat(privileges, iterableWithSize(6));
+        assertThat(privileges, contains(
+            descriptor("foo", "read", "data:/read/*", "admin:/read/*"),
+            descriptor("foo", "write", "data:/write/*", "admin:*"),
+            descriptor("foo", "all", "*"),
+            descriptor("bar", "read", "read/*"),
+            descriptor("bar", "write", "write/*"),
+            descriptor("bar", "all", "*")
+        ));
+    }
+
+    private ApplicationPrivilegeDescriptor descriptor(String app, String name, String... actions) {
+        return new ApplicationPrivilegeDescriptor(app, name, Sets.newHashSet(actions), Collections.emptyMap());
+    }
+
+    public void testBuildRequestFromJsonObject() throws Exception {
+        final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE);
+        builder.source("foo", "read", new BytesArray(
+            "{ \"application\":\"foo\", \"name\":\"read\", \"actions\":[ \"data:/read/*\", \"admin:/read/*\" ] }"
+        ), XContentType.JSON);
+        final List<ApplicationPrivilegeDescriptor> privileges = builder.request().getPrivileges();
+        assertThat(privileges, iterableWithSize(1));
+        assertThat(privileges, contains(descriptor("foo", "read", "data:/read/*", "admin:/read/*")));
+    }
+
+    public void testPrivilegeNameValidationOfSingleElement() throws Exception {
+        final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE);
+        final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () ->
+            builder.source("foo", "write", new BytesArray(
+                "{ \"application\":\"foo\", \"name\":\"read\", \"actions\":[ \"data:/read/*\", \"admin:/read/*\" ] }"
+            ), XContentType.JSON));
+        assertThat(exception.getMessage(), containsString("write"));
+        assertThat(exception.getMessage(), containsString("read"));
+    }
+
+    public void testApplicationNameValidationOfSingleElement() throws Exception {
+        final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE);
+        final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () ->
+            builder.source("bar", "read", new BytesArray(
+                "{ \"application\":\"foo\", \"name\":\"read\", \"actions\":[ \"data:/read/*\", \"admin:/read/*\" ] }"
+            ), XContentType.JSON));
+        assertThat(exception.getMessage(), containsString("foo"));
+        assertThat(exception.getMessage(), containsString("bar"));
+    }
+
+    public void testPrivilegeNameValidationOfMultipleElement() throws Exception {
+        final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE);
+        final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () ->
+            builder.source(new BytesArray("{ \"foo\":{"
+                + "\"write\":{ \"application\":\"foo\", \"name\":\"read\", \"actions\":[\"data:/read/*\",\"admin:/read/*\"] },"
+                + "\"all\":{ \"application\":\"foo\", \"name\":\"all\", \"actions\":[ \"/*\" ] }"
+                + "} }"), XContentType.JSON)
+        );
+        assertThat(exception.getMessage(), containsString("write"));
+        assertThat(exception.getMessage(), containsString("read"));
+    }
+
+    public void testApplicationNameValidationOfMultipleElement() throws Exception {
+        final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE);
+        final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () ->
+            builder.source(new BytesArray("{ \"bar\":{"
+                + "\"read\":{ \"application\":\"foo\", \"name\":\"read\", \"actions\":[ \"data:/read/*\", \"admin:/read/*\" ] },"
+                + "\"write\":{ \"application\":\"foo\", \"name\":\"write\", \"actions\":[ \"data:/write/*\", \"admin:/*\" ] },"
+                + "\"all\":{ \"application\":\"foo\", \"name\":\"all\", \"actions\":[ \"/*\" ] }"
+                + "} }"), XContentType.JSON)
+        );
+        assertThat(exception.getMessage(), containsString("bar"));
+        assertThat(exception.getMessage(), containsString("foo"));
+    }
+
+    public void testInferApplicationNameAndPrivilegeName() throws Exception {
+        final PutPrivilegesRequestBuilder builder = new PutPrivilegesRequestBuilder(null, PutPrivilegesAction.INSTANCE);
+        builder.source(new BytesArray("{ \"foo\":{"
+            + "\"read\":{ \"actions\":[ \"data:/read/*\", \"admin:/read/*\" ] },"
+            + "\"write\":{ \"actions\":[ \"data:/write/*\", \"admin:/*\" ] },"
+            + "\"all\":{ \"actions\":[ \"*\" ] }"
+            + "} }"), XContentType.JSON);
+        assertThat(builder.request().getPrivileges(), iterableWithSize(3));
+        for (ApplicationPrivilegeDescriptor p : builder.request().getPrivileges()) {
+            assertThat(p.getApplication(), equalTo("foo"));
+            assertThat(p.getName(), notNullValue());
+        }
+        assertThat(builder.request().getPrivileges().get(0).getName(), equalTo("read"));
+        assertThat(builder.request().getPrivileges().get(1).getName(), equalTo("write"));
+        assertThat(builder.request().getPrivileges().get(2).getName(), equalTo("all"));
+    }
+
+}
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140JKSKeystoreBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140JKSKeystoreBootstrapCheckTests.java
new file mode 100644
index 0000000000000..1d4da71e11b5e
--- /dev/null
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140JKSKeystoreBootstrapCheckTests.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security;
+
+import org.elasticsearch.bootstrap.BootstrapContext;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.test.ESTestCase;
+
+public class FIPS140JKSKeystoreBootstrapCheckTests extends ESTestCase {
+
+    public void testNoKeystoreIsAllowed() {
+        final Settings.Builder settings = Settings.builder()
+            .put("xpack.security.fips_mode.enabled", "true");
+        assertFalse(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure());
+    }
+
+    public void testSSLKeystoreTypeIsNotAllowed() {
+        final Settings.Builder settings = Settings.builder()
+            .put("xpack.security.fips_mode.enabled", "true")
+            .put("xpack.ssl.keystore.path", "/this/is/the/path")
+            .put("xpack.ssl.keystore.type", "JKS");
+        assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure());
+    }
+
+    public void testSSLImplicitKeystoreTypeIsNotAllowed() {
+        final Settings.Builder settings = Settings.builder()
+            .put("xpack.security.fips_mode.enabled", "true")
+            .put("xpack.ssl.keystore.path", "/this/is/the/path");
+        assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure());
+    }
+
+    public void testTransportSSLKeystoreTypeIsNotAllowed() {
+        final Settings.Builder settings = Settings.builder()
+            .put("xpack.security.fips_mode.enabled", "true")
+            .put("xpack.security.transport.ssl.keystore.path", "/this/is/the/path")
+            .put("xpack.security.transport.ssl.keystore.type", "JKS");
+        assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure());
+    }
+
+    public void testHttpSSLKeystoreTypeIsNotAllowed() {
+        final Settings.Builder settings = Settings.builder()
+            .put("xpack.security.fips_mode.enabled", "true")
+            .put("xpack.security.http.ssl.keystore.path", "/this/is/the/path")
+            .put("xpack.security.http.ssl.keystore.type", "JKS");
+        assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure());
+    }
+
+    public void testRealmKeystoreTypeIsNotAllowed() {
+        final Settings.Builder settings = Settings.builder()
+            .put("xpack.security.fips_mode.enabled", "true")
+            .put("xpack.security.authc.realms.ldap.ssl.keystore.path", "/this/is/the/path")
+            .put("xpack.security.authc.realms.ldap.ssl.keystore.type", "JKS");
+        assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure());
+    }
+
+    public void testImplicitRealmKeystoreTypeIsNotAllowed() {
+        final Settings.Builder settings = Settings.builder()
+            .put("xpack.security.fips_mode.enabled", "true")
+            .put("xpack.security.authc.realms.ldap.ssl.keystore.path", "/this/is/the/path");
+        assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure());
+    }
+}
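Note: a configuration that passes FIPS140JKSKeystoreBootstrapCheck avoids JKS keystores entirely, for example by using PEM key material; the PEM setting names below are the standard x-pack SSL settings, not part of this change:

    Settings settings = Settings.builder()
        .put("xpack.security.fips_mode.enabled", "true")
        .put("xpack.security.transport.ssl.key", "/path/to/key.pem")
        .put("xpack.security.transport.ssl.certificate", "/path/to/cert.pem")
        .build();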
.put("xpack.security.http.ssl.keystore.type", "JKS"); + assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure()); + } + + public void testRealmKeystoreTypeIsNotAllowed() { + final Settings.Builder settings = Settings.builder() + .put("xpack.security.fips_mode.enabled", "true") + .put("xpack.security.authc.realms.ldap.ssl.keystore.path", "/this/is/the/path") + .put("xpack.security.authc.realms.ldap.ssl.keystore.type", "JKS"); + assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure()); + } + + public void testImplicitRealmKeystoreTypeIsNotAllowed() { + final Settings.Builder settings = Settings.builder() + .put("xpack.security.fips_mode.enabled", "true") + .put("xpack.security.authc.realms.ldap.ssl.keystore.path", "/this/is/the/path"); + assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure()); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140PasswordHashingAlgorithmBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140PasswordHashingAlgorithmBootstrapCheckTests.java new file mode 100644 index 0000000000000..310a6e241e057 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140PasswordHashingAlgorithmBootstrapCheckTests.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security; + +import org.elasticsearch.bootstrap.BootstrapContext; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.XPackSettings; + +public class FIPS140PasswordHashingAlgorithmBootstrapCheckTests extends ESTestCase { + + public void testPBKDF2AlgorithmIsAllowed() { + Settings settings = Settings.builder().put("xpack.security.fips_mode.enabled", "true").build(); + + settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "PBKDF2_10000").build(); + assertFalse(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure()); + + settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "PBKDF2").build(); + assertFalse(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure()); + } + + public void testBCRYPTAlgorithmIsNotAllowed() { + Settings settings = Settings.builder().put("xpack.security.fips_mode.enabled", "true").build(); + assertTrue(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure()); + settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "BCRYPT").build(); + assertTrue(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure()); + + settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "BCRYPT11").build(); + assertTrue(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure()); + } +} diff --git 
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140SecureSettingsBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140SecureSettingsBootstrapCheckTests.java
new file mode 100644
index 0000000000000..fb9e7155242f0
--- /dev/null
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/FIPS140SecureSettingsBootstrapCheckTests.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security;
+
+import org.apache.lucene.codecs.CodecUtil;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.SimpleFSDirectory;
+import org.elasticsearch.bootstrap.BootstrapContext;
+import org.elasticsearch.common.settings.KeyStoreWrapper;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.env.TestEnvironment;
+import org.elasticsearch.test.ESTestCase;
+
+import javax.crypto.SecretKey;
+import javax.crypto.SecretKeyFactory;
+import javax.crypto.spec.PBEKeySpec;
+import java.io.ByteArrayOutputStream;
+import java.nio.file.Path;
+import java.security.AccessControlException;
+import java.security.KeyStore;
+import java.util.Base64;
+
+public class FIPS140SecureSettingsBootstrapCheckTests extends ESTestCase {
+
+    public void testLegacySecureSettingsIsNotAllowed() throws Exception {
+        assumeFalse("Can't run in a FIPS JVM, PBE is not available", inFipsJvm());
+        final Settings.Builder builder = Settings.builder()
+            .put("path.home", createTempDir())
+            .put("xpack.security.fips_mode.enabled", "true");
+        Environment env = TestEnvironment.newEnvironment(builder.build());
+        generateV2Keystore(env);
+        assertTrue(new FIPS140SecureSettingsBootstrapCheck(builder.build(), env).check(new BootstrapContext(builder.build(),
+            null)).isFailure());
+    }
+
+    public void testCorrectSecureSettingsVersionIsAllowed() throws Exception {
+        final Settings.Builder builder = Settings.builder()
+            .put("path.home", createTempDir())
+            .put("xpack.security.fips_mode.enabled", "true");
+        Environment env = TestEnvironment.newEnvironment(builder.build());
+        final KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create();
+        try {
+            keyStoreWrapper.save(env.configFile(), "password".toCharArray());
+        } catch (final AccessControlException e) {
+            if (e.getPermission() instanceof RuntimePermission && e.getPermission().getName().equals("accessUserInformation")) {
+                // this is expected, but we don't care in tests
+            } else {
+                throw e;
+            }
+        }
+        assertFalse(new FIPS140SecureSettingsBootstrapCheck(builder.build(), env).check(new BootstrapContext(builder.build(),
+            null)).isFailure());
+    }
+
+    private void generateV2Keystore(Environment env) throws Exception {
+        Path configDir = env.configFile();
+        SimpleFSDirectory directory = new SimpleFSDirectory(configDir);
+        byte[] fileBytes = new byte[20];
+        random().nextBytes(fileBytes);
+        try (IndexOutput output = directory.createOutput("elasticsearch.keystore", IOContext.DEFAULT)) {
+
+            CodecUtil.writeHeader(output, "elasticsearch.keystore", 2);
+            output.writeByte((byte) 0); // hasPassword = false
+            output.writeString("PKCS12");
+            output.writeString("PBE"); // string algo
+            output.writeString("PBE"); // file algo
+
+            output.writeVInt(2); // num settings
+            output.writeString("string_setting");
+            output.writeString("STRING");
+            output.writeString("file_setting");
+            output.writeString("FILE");
+
+            SecretKeyFactory secretFactory = SecretKeyFactory.getInstance("PBE");
+            KeyStore keystore = KeyStore.getInstance("PKCS12");
+            keystore.load(null, null);
+            SecretKey secretKey = secretFactory.generateSecret(new PBEKeySpec("stringSecretValue".toCharArray()));
+            KeyStore.ProtectionParameter protectionParameter = new KeyStore.PasswordProtection(new char[0]);
+            keystore.setEntry("string_setting", new KeyStore.SecretKeyEntry(secretKey), protectionParameter);
+
+            byte[] base64Bytes = Base64.getEncoder().encode(fileBytes);
+            char[] chars = new char[base64Bytes.length];
+            for (int i = 0; i < chars.length; ++i) {
+                chars[i] = (char) base64Bytes[i]; // PBE only stores the lower 8 bits, so this narrowing is ok
+            }
+            secretKey = secretFactory.generateSecret(new PBEKeySpec(chars));
+            keystore.setEntry("file_setting", new KeyStore.SecretKeyEntry(secretKey), protectionParameter);
+
+            ByteArrayOutputStream keystoreBytesStream = new ByteArrayOutputStream();
+            keystore.store(keystoreBytesStream, new char[0]);
+            byte[] keystoreBytes = keystoreBytesStream.toByteArray();
+            output.writeInt(keystoreBytes.length);
+            output.writeBytes(keystoreBytes, keystoreBytes.length);
+            CodecUtil.writeFooter(output);
+        }
+    }
+}
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java
index 2d53a3e6e8615..0b9de2da33288 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java
@@ -114,6 +114,6 @@ public void testMissingPrivilegesThrowsException() throws Exception {
         final ElasticsearchParseException parseException = expectThrows(ElasticsearchParseException.class, () ->
             builder.source("elastic", new BytesArray(json.getBytes(StandardCharsets.UTF_8)), XContentType.JSON)
         );
-        assertThat(parseException.getMessage(), containsString("[index] and [cluster] are both missing"));
+        assertThat(parseException.getMessage(), containsString("[cluster,index,applications] are missing"));
     }
 }
\ No newline at end of file
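Note: the new message asserted above reflects that a has_privileges body must now populate at least one of the cluster, index, or applications sections. A minimal valid request built through the API used in the tests that follow (values illustrative):

    HasPrivilegesRequest request = new HasPrivilegesRequest();
    request.username("elastic");
    request.clusterPrivileges("monitor"); // any one non-empty section suffices
    request.indexPrivileges(new RoleDescriptor.IndicesPrivileges[0]);
    request.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]);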
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java
index 2c49c8d595e0c..a2e283e1b36ff 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java
@@ -15,43 +15,60 @@
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.mock.orig.Mockito;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.Transport;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest;
 import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse;
-import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse.IndexPrivileges;
+import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse.ResourcePrivileges;
 import org.elasticsearch.xpack.core.security.authc.Authentication;
 import org.elasticsearch.xpack.core.security.authc.AuthenticationField;
 import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
 import org.elasticsearch.xpack.core.security.authz.permission.Role;
+import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege;
+import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor;
 import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege;
 import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege;
 import org.elasticsearch.xpack.core.security.user.User;
 import org.elasticsearch.xpack.security.authz.AuthorizationService;
+import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore;
 import org.hamcrest.Matchers;
 import org.junit.Before;
 
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Locale;
 
+import static java.util.Collections.emptyMap;
+import static org.elasticsearch.common.util.set.Sets.newHashSet;
+import static org.hamcrest.Matchers.arrayWithSize;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.iterableWithSize;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
+@TestLogging("org.elasticsearch.xpack.security.action.user.TransportHasPrivilegesAction:TRACE," +
+    "org.elasticsearch.xpack.core.security.authz.permission.ApplicationPermission:DEBUG")
 public class TransportHasPrivilegesActionTests extends ESTestCase {
 
     private User user;
     private Role role;
     private TransportHasPrivilegesAction action;
+    private List<ApplicationPrivilegeDescriptor> applicationPrivileges;
 
     @Before
     public void setup() {
@@ -75,7 +92,19 @@ public void setup() {
             return null;
         }).when(authorizationService).roles(eq(user), any(ActionListener.class));
 
-        action = new TransportHasPrivilegesAction(settings, threadPool, transportService, mock(ActionFilters.class), authorizationService);
+        applicationPrivileges = new ArrayList<>();
+        NativePrivilegeStore privilegeStore = mock(NativePrivilegeStore.class);
+        Mockito.doAnswer(inv -> {
+            assertThat(inv.getArguments(), arrayWithSize(3));
+            ActionListener<Collection<ApplicationPrivilegeDescriptor>> listener
+                = (ActionListener<Collection<ApplicationPrivilegeDescriptor>>) inv.getArguments()[2];
+            logger.info("Privileges for ({}) are {}", Arrays.toString(inv.getArguments()), applicationPrivileges);
+            listener.onResponse(applicationPrivileges);
+            return null;
+        }).when(privilegeStore).getPrivileges(any(Collection.class), any(Collection.class), any(ActionListener.class));
+
+        action = new TransportHasPrivilegesAction(settings, threadPool, transportService, mock(ActionFilters.class), authorizationService,
+            privilegeStore);
     }
 
     /**
@@ -92,6 +121,7 @@ public void testNamedIndexPrivilegesMatchApplicableActions() throws Exception {
             .indices("academy")
             .privileges(DeleteAction.NAME, IndexAction.NAME)
             .build());
+        request.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]);
 
         final PlainActionFuture<HasPrivilegesResponse> future = new PlainActionFuture<>();
         action.doExecute(mock(Task.class), request, future);
@@ -103,8 +133,8 @@ public void testNamedIndexPrivilegesMatchApplicableActions() throws Exception {
         assertThat(response.getClusterPrivileges().get(ClusterHealthAction.NAME), equalTo(true));
 
         assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(1));
-        final IndexPrivileges result = response.getIndexPrivileges().get(0);
-        assertThat(result.getIndex(), equalTo("academy"));
+        final ResourcePrivileges result = response.getIndexPrivileges().get(0);
+        assertThat(result.getResource(), equalTo("academy"));
         assertThat(result.getPrivileges().size(), equalTo(2));
         assertThat(result.getPrivileges().get(DeleteAction.NAME), equalTo(true));
         assertThat(result.getPrivileges().get(IndexAction.NAME), equalTo(true));
@@ -128,6 +158,7 @@ public void testMatchSubsetOfPrivileges() throws Exception {
             .indices("academy", "initiative", "school")
             .privileges("delete", "index", "manage")
             .build());
+        request.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]);
 
         final PlainActionFuture<HasPrivilegesResponse> future = new PlainActionFuture<>();
         action.doExecute(mock(Task.class), request, future);
@@ -139,23 +170,23 @@ public void testMatchSubsetOfPrivileges() throws Exception {
         assertThat(response.getClusterPrivileges().get("manage"), equalTo(false));
 
         assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(3));
-        final IndexPrivileges academy = response.getIndexPrivileges().get(0);
-        final IndexPrivileges initiative = response.getIndexPrivileges().get(1);
-        final IndexPrivileges school = response.getIndexPrivileges().get(2);
+        final ResourcePrivileges academy = response.getIndexPrivileges().get(0);
+        final ResourcePrivileges initiative = response.getIndexPrivileges().get(1);
+        final ResourcePrivileges school = response.getIndexPrivileges().get(2);
 
-        assertThat(academy.getIndex(), equalTo("academy"));
+        assertThat(academy.getResource(), equalTo("academy"));
         assertThat(academy.getPrivileges().size(), equalTo(3));
         assertThat(academy.getPrivileges().get("index"), equalTo(true)); // explicit
         assertThat(academy.getPrivileges().get("delete"), equalTo(false));
         assertThat(academy.getPrivileges().get("manage"), equalTo(false));
 
-        assertThat(initiative.getIndex(), equalTo("initiative"));
+        assertThat(initiative.getResource(), equalTo("initiative"));
         assertThat(initiative.getPrivileges().size(), equalTo(3));
         assertThat(initiative.getPrivileges().get("index"), equalTo(true)); // implied by write
         assertThat(initiative.getPrivileges().get("delete"), equalTo(true)); // implied by write
         assertThat(initiative.getPrivileges().get("manage"), equalTo(false));
 
-        assertThat(school.getIndex(), equalTo("school"));
+        assertThat(school.getResource(), equalTo("school"));
         assertThat(school.getPrivileges().size(), equalTo(3));
         assertThat(school.getPrivileges().get("index"), equalTo(false));
         assertThat(school.getPrivileges().get("delete"), equalTo(false));
@@ -177,8 +208,8 @@ public void testMatchNothing() throws Exception {
             .build(), Strings.EMPTY_ARRAY);
         assertThat(response.isCompleteMatch(), is(false));
         assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(1));
-        final IndexPrivileges result = response.getIndexPrivileges().get(0);
-        assertThat(result.getIndex(), equalTo("academy"));
+        final ResourcePrivileges result = response.getIndexPrivileges().get(0);
+        assertThat(result.getResource(), equalTo("academy"));
         assertThat(result.getPrivileges().size(), equalTo(2));
         assertThat(result.getPrivileges().get("read"), equalTo(false));
         assertThat(result.getPrivileges().get("write"), equalTo(false));
@@ -191,10 +222,20 @@ public void testMatchNothing() throws Exception {
      * does the user have ___ privilege on a wildcard that covers (is a superset of) this pattern?
      */
     public void testWildcardHandling() throws Exception {
+        final ApplicationPrivilege kibanaRead = defineApplicationPrivilege("kibana", "read",
+            "data:read/*", "action:login", "action:view/dashboard");
+        final ApplicationPrivilege kibanaWrite = defineApplicationPrivilege("kibana", "write",
+            "data:write/*", "action:login", "action:view/dashboard");
+        final ApplicationPrivilege kibanaAdmin = defineApplicationPrivilege("kibana", "admin",
+            "action:login", "action:manage/*");
+        final ApplicationPrivilege kibanaViewSpace = defineApplicationPrivilege("kibana", "view-space",
+            "action:login", "space:view/*");
         role = Role.builder("test3")
             .add(IndexPrivilege.ALL, "logstash-*", "foo?")
             .add(IndexPrivilege.READ, "abc*")
             .add(IndexPrivilege.WRITE, "*xyz")
+            .addApplicationPrivilege(kibanaRead, Collections.singleton("*"))
+            .addApplicationPrivilege(kibanaViewSpace, newHashSet("space/engineering/*", "space/builds"))
             .build();
 
         final HasPrivilegesRequest request = new HasPrivilegesRequest();
@@ -230,6 +271,20 @@ public void testWildcardHandling() throws Exception {
             .privileges("read", "write", "manage") // read = No, write = Yes (WRITE, "*xyz"), manage = No
             .build()
         );
+
+        request.applicationPrivileges(
+            RoleDescriptor.ApplicationResourcePrivileges.builder()
+                .resources("*")
+                .application("kibana")
+                .privileges(Sets.union(kibanaRead.name(), kibanaWrite.name())) // read = Yes, write = No
+                .build(),
+            RoleDescriptor.ApplicationResourcePrivileges.builder()
+                .resources("space/engineering/project-*", "space/*") // project-* = Yes, space/* = Not
+                .application("kibana")
+                .privileges("space:view/dashboard")
+                .build()
+        );
+
         final PlainActionFuture<HasPrivilegesResponse> future = new PlainActionFuture<>();
         action.doExecute(mock(Task.class), request, future);
 
@@ -238,15 +293,28 @@ public void testWildcardHandling() throws Exception {
         assertThat(response.isCompleteMatch(), is(false));
         assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(8));
         assertThat(response.getIndexPrivileges(), containsInAnyOrder(
-            new IndexPrivileges("logstash-2016-*", Collections.singletonMap("write", true)),
-            new IndexPrivileges("logstash-*", Collections.singletonMap("read", true)),
-            new IndexPrivileges("log*", Collections.singletonMap("manage", false)),
-            new IndexPrivileges("foo?", Collections.singletonMap("read", true)),
-            new IndexPrivileges("foo*", Collections.singletonMap("read", false)),
-            new IndexPrivileges("abcd*", mapBuilder().put("read", true).put("write", false).map()),
-            new IndexPrivileges("abc*xyz", mapBuilder().put("read", true).put("write", true).put("manage", false).map()),
-            new IndexPrivileges("a*xyz", mapBuilder().put("read", false).put("write", true).put("manage", false).map())
+            new ResourcePrivileges("logstash-2016-*", Collections.singletonMap("write", true)),
+            new ResourcePrivileges("logstash-*", Collections.singletonMap("read", true)),
+            new ResourcePrivileges("log*", Collections.singletonMap("manage", false)),
+            new ResourcePrivileges("foo?", Collections.singletonMap("read", true)),
+            new ResourcePrivileges("foo*", Collections.singletonMap("read", false)),
+            new ResourcePrivileges("abcd*", mapBuilder().put("read", true).put("write", false).map()),
+            new ResourcePrivileges("abc*xyz", mapBuilder().put("read", true).put("write", true).put("manage", false).map()),
+            new ResourcePrivileges("a*xyz", mapBuilder().put("read", false).put("write", true).put("manage", false).map())
         ));
+
+        assertThat(response.getApplicationPrivileges().entrySet(), Matchers.iterableWithSize(1));
+        final List<ResourcePrivileges> kibanaPrivileges = response.getApplicationPrivileges().get("kibana");
+        assertThat(kibanaPrivileges, Matchers.iterableWithSize(3));
+        assertThat(Strings.collectionToCommaDelimitedString(kibanaPrivileges), kibanaPrivileges, containsInAnyOrder(
+            new ResourcePrivileges("*", mapBuilder().put("read", true).put("write", false).map()),
+            new ResourcePrivileges("space/engineering/project-*", Collections.singletonMap("space:view/dashboard", true)),
+            new ResourcePrivileges("space/*", Collections.singletonMap("space:view/dashboard", false))
+        ));
+    }
+
+    private ApplicationPrivilege defineApplicationPrivilege(String app, String name, String... actions) {
+        this.applicationPrivileges.add(new ApplicationPrivilegeDescriptor(app, name, newHashSet(actions), emptyMap()));
+        return new ApplicationPrivilege(app, name, actions);
+    }
 
     public void testCheckingIndexPermissionsDefinedOnDifferentPatterns() throws Exception {
@@ -262,27 +330,152 @@ public void testCheckingIndexPermissionsDefinedOnDifferentPatterns() throws Exce
         assertThat(response.isCompleteMatch(), is(false));
         assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(2));
         assertThat(response.getIndexPrivileges(), containsInAnyOrder(
-            new IndexPrivileges("apache-2016-12",
+            new ResourcePrivileges("apache-2016-12",
                 MapBuilder.newMapBuilder(new LinkedHashMap<String, Boolean>())
                     .put("index", true).put("delete", true).map()),
-            new IndexPrivileges("apache-2017-01",
+            new ResourcePrivileges("apache-2017-01",
                 MapBuilder.newMapBuilder(new LinkedHashMap<String, Boolean>())
                     .put("index", true).put("delete", false).map()
             )
         ));
     }
 
+    public void testCheckingApplicationPrivilegesOnDifferentApplicationsAndResources() throws Exception {
+        final ApplicationPrivilege app1Read = defineApplicationPrivilege("app1", "read", "data:read/*");
+        final ApplicationPrivilege app1Write = defineApplicationPrivilege("app1", "write", "data:write/*");
+        final ApplicationPrivilege app1All = defineApplicationPrivilege("app1", "all", "*");
+        final ApplicationPrivilege app2Read = defineApplicationPrivilege("app2", "read", "data:read/*");
+        final ApplicationPrivilege app2Write = defineApplicationPrivilege("app2", "write", "data:write/*");
+        final ApplicationPrivilege app2All = defineApplicationPrivilege("app2", "all", "*");
+
+        role = Role.builder("test-role")
+            .addApplicationPrivilege(app1Read, Collections.singleton("foo/*"))
+            .addApplicationPrivilege(app1All, Collections.singleton("foo/bar/baz"))
+            .addApplicationPrivilege(app2Read, Collections.singleton("foo/bar/*"))
+            .addApplicationPrivilege(app2Write, Collections.singleton("*/bar/*"))
+            .build();
+
+        final HasPrivilegesResponse response = hasPrivileges(new RoleDescriptor.IndicesPrivileges[0],
+            new RoleDescriptor.ApplicationResourcePrivileges[]{
+                RoleDescriptor.ApplicationResourcePrivileges.builder()
+                    .application("app1")
+                    .resources("foo/1", "foo/bar/2", "foo/bar/baz", "baz/bar/foo")
+                    .privileges("read", "write", "all")
+                    .build(),
+                RoleDescriptor.ApplicationResourcePrivileges.builder()
+                    .application("app2")
+                    .resources("foo/1", "foo/bar/2", "foo/bar/baz", "baz/bar/foo")
+                    .privileges("read", "write", "all")
+                    .build()
+            }, Strings.EMPTY_ARRAY);
+
+        assertThat(response.isCompleteMatch(), is(false));
+        assertThat(response.getIndexPrivileges(), Matchers.emptyIterable());
+        assertThat(response.getApplicationPrivileges().entrySet(), Matchers.iterableWithSize(2));
+        final List<ResourcePrivileges> app1 = response.getApplicationPrivileges().get("app1");
+        assertThat(app1, Matchers.iterableWithSize(4));
+        assertThat(Strings.collectionToCommaDelimitedString(app1), app1, containsInAnyOrder(
+            new ResourcePrivileges("foo/1", MapBuilder.newMapBuilder(new LinkedHashMap<String, Boolean>())
+                .put("read", true).put("write", false).put("all", false).map()),
+            new ResourcePrivileges("foo/bar/2", MapBuilder.newMapBuilder(new LinkedHashMap<String, Boolean>())
+                .put("read", true).put("write", false).put("all", false).map()),
+            new ResourcePrivileges("foo/bar/baz", MapBuilder.newMapBuilder(new LinkedHashMap<String, Boolean>())
+                .put("read", true).put("write", true).put("all", true).map()),
+            new ResourcePrivileges("baz/bar/foo", MapBuilder.newMapBuilder(new LinkedHashMap<String, Boolean>())
+                .put("read", false).put("write", false).put("all", false).map())
+        ));
+        final List<ResourcePrivileges> app2 = response.getApplicationPrivileges().get("app2");
+        assertThat(app2, Matchers.iterableWithSize(4));
+        assertThat(Strings.collectionToCommaDelimitedString(app2), app2, containsInAnyOrder(
+            new ResourcePrivileges("foo/1", MapBuilder.newMapBuilder(new LinkedHashMap<String, Boolean>())
+                .put("read", false).put("write", false).put("all", false).map()),
+            new ResourcePrivileges("foo/bar/2", MapBuilder.newMapBuilder(new LinkedHashMap<String, Boolean>())
+                .put("read", true).put("write", true).put("all", false).map()),
+            new ResourcePrivileges("foo/bar/baz", MapBuilder.newMapBuilder(new LinkedHashMap<String, Boolean>())
+                .put("read", true).put("write", true).put("all", false).map()),
+            new ResourcePrivileges("baz/bar/foo", MapBuilder.newMapBuilder(new LinkedHashMap<String, Boolean>())
+                .put("read", false).put("write", true).put("all", false).map())
+        ));
+    }
+
+    public void testCheckingApplicationPrivilegesWithComplexNames() throws Exception {
+        final String appName = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(3, 10);
+        final String action1 = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(2, 5);
+        final String action2 = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(6, 9);
+
+        final ApplicationPrivilege priv1 = defineApplicationPrivilege(appName, action1, "DATA:read/*", "ACTION:" + action1);
+        final ApplicationPrivilege priv2 = defineApplicationPrivilege(appName, action2, "DATA:read/*", "ACTION:" + action2);
+
+        role = Role.builder("test-write")
+            .addApplicationPrivilege(priv1, Collections.singleton("user/*/name"))
+            .build();
+
+        final HasPrivilegesResponse response = hasPrivileges(
+            new RoleDescriptor.IndicesPrivileges[0],
+            new RoleDescriptor.ApplicationResourcePrivileges[]{
+                RoleDescriptor.ApplicationResourcePrivileges.builder()
+                    .application(appName)
+                    .resources("user/hawkeye/name")
+                    .privileges("DATA:read/user/*", "ACTION:" + action1, "ACTION:" + action2, action1, action2)
+                    .build()
+            },
+            "monitor");
+        assertThat(response.isCompleteMatch(), is(false));
+        assertThat(response.getApplicationPrivileges().keySet(), containsInAnyOrder(appName));
+        assertThat(response.getApplicationPrivileges().get(appName), iterableWithSize(1));
+        assertThat(response.getApplicationPrivileges().get(appName), containsInAnyOrder(
+            new ResourcePrivileges("user/hawkeye/name", MapBuilder.newMapBuilder(new LinkedHashMap<String, Boolean>())
+                .put("DATA:read/user/*", true)
+                .put("ACTION:" + action1, true)
+                .put("ACTION:" + action2, false)
+                .put(action1, true)
+                .put(action2, false)
+                .map())
+        ));
+    }
+
     public void testIsCompleteMatch() throws Exception {
+        final ApplicationPrivilege kibanaRead = defineApplicationPrivilege("kibana", "read", "data:read/*");
+        final ApplicationPrivilege kibanaWrite = defineApplicationPrivilege("kibana", "write", "data:write/*");
         role = Role.builder("test-write")
             .cluster(ClusterPrivilege.MONITOR)
             .add(IndexPrivilege.READ, "read-*")
             .add(IndexPrivilege.ALL, "all-*")
+            .addApplicationPrivilege(kibanaRead, Collections.singleton("*"))
             .build();
 
         assertThat(hasPrivileges(indexPrivileges("read", "read-123", "read-456", "all-999"), "monitor").isCompleteMatch(), is(true));
         assertThat(hasPrivileges(indexPrivileges("read", "read-123", "read-456", "all-999"), "manage").isCompleteMatch(), is(false));
         assertThat(hasPrivileges(indexPrivileges("write", "read-123", "read-456", "all-999"), "monitor").isCompleteMatch(), is(false));
         assertThat(hasPrivileges(indexPrivileges("write", "read-123", "read-456", "all-999"), "manage").isCompleteMatch(), is(false));
+        assertThat(hasPrivileges(
+            new RoleDescriptor.IndicesPrivileges[]{
+                RoleDescriptor.IndicesPrivileges.builder()
+                    .indices("read-a")
+                    .privileges("read")
+                    .build(),
+                RoleDescriptor.IndicesPrivileges.builder()
+                    .indices("all-b")
+                    .privileges("read", "write")
+                    .build()
+            },
+            new RoleDescriptor.ApplicationResourcePrivileges[]{
+                RoleDescriptor.ApplicationResourcePrivileges.builder()
+                    .application("kibana")
+                    .resources("*")
+                    .privileges("read")
+                    .build()
+            },
+            "monitor").isCompleteMatch(), is(true));
+        assertThat(hasPrivileges(
+            new RoleDescriptor.IndicesPrivileges[]{indexPrivileges("read", "read-123", "read-456", "all-999")},
+            new RoleDescriptor.ApplicationResourcePrivileges[]{
+                RoleDescriptor.ApplicationResourcePrivileges.builder()
+                    .application("kibana").resources("*").privileges("read").build(),
+                RoleDescriptor.ApplicationResourcePrivileges.builder()
+                    .application("kibana").resources("*").privileges("write").build()
+            },
+            "monitor").isCompleteMatch(), is(false));
     }
 
     private RoleDescriptor.IndicesPrivileges indexPrivileges(String priv, String... indices) {
@@ -294,10 +487,21 @@ private RoleDescriptor.IndicesPrivileges indexPrivileges(String priv, String...
 
     private HasPrivilegesResponse hasPrivileges(RoleDescriptor.IndicesPrivileges indicesPrivileges, String... clusterPrivileges)
         throws Exception {
+        return hasPrivileges(
+            new RoleDescriptor.IndicesPrivileges[]{indicesPrivileges},
+            new RoleDescriptor.ApplicationResourcePrivileges[0],
+            clusterPrivileges
+        );
+    }
+
+    private HasPrivilegesResponse hasPrivileges(RoleDescriptor.IndicesPrivileges[] indicesPrivileges,
+                                                RoleDescriptor.ApplicationResourcePrivileges[] appPrivileges,
+                                                String... clusterPrivileges) throws Exception {
         final HasPrivilegesRequest request = new HasPrivilegesRequest();
         request.username(user.principal());
         request.clusterPrivileges(clusterPrivileges);
         request.indexPrivileges(indicesPrivileges);
+        request.applicationPrivileges(appPrivileges);
         final PlainActionFuture<HasPrivilegesResponse> future = new PlainActionFuture<>();
         action.doExecute(mock(Task.class), request, future);
         final HasPrivilegesResponse response = future.get();
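Note: the hasPrivileges(...) helpers above all reduce to this request/response cycle; a condensed sketch using only calls that appear in the tests (values illustrative):

    HasPrivilegesRequest request = new HasPrivilegesRequest();
    request.username(user.principal());
    request.clusterPrivileges("monitor");
    request.indexPrivileges(RoleDescriptor.IndicesPrivileges.builder()
        .indices("academy").privileges("read").build());
    request.applicationPrivileges(RoleDescriptor.ApplicationResourcePrivileges.builder()
        .application("kibana").resources("*").privileges("read").build());
    // response.isCompleteMatch() is true only if every requested privilege is granted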
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java
index 7d4469133687e..453820ea519fc 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java
@@ -79,7 +79,7 @@ public String configUsers() {
     public String configUsersRoles() {
         return super.configUsersRoles()
             + ROLE_CAN_RUN_AS + ":" + AUTHENTICATE_USER + "\n"
-            + "kibana_user:" + EXECUTE_USER;
+            + "monitoring_user:" + EXECUTE_USER;
     }
 
     @Override
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java
index bb32ed699950c..fda87d0340b8a 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java
@@ -49,6 +49,7 @@
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportMessage;
+import org.elasticsearch.xpack.core.XPackField;
 import org.elasticsearch.xpack.core.XPackSettings;
 import org.elasticsearch.xpack.core.security.authc.Authentication;
 import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef;
@@ -88,6 +89,7 @@
 import static org.elasticsearch.xpack.core.security.support.Exceptions.authenticationError;
 import static org.elasticsearch.xpack.security.authc.TokenServiceTests.mockGetTokenFromId;
 import static org.hamcrest.Matchers.arrayContaining;
+import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
@@ -618,6 +620,47 @@ public void testRealmSupportsMethodThrowingExceptionRest() throws Exception {
         }
     }
 
+    public void testRealmAuthenticateTerminatingAuthenticationProcess() throws Exception {
+        final AuthenticationToken token = mock(AuthenticationToken.class);
+        when(secondRealm.token(threadContext)).thenReturn(token);
+        when(secondRealm.supports(token)).thenReturn(true);
+        final boolean terminateWithNoException = rarely();
+        final boolean throwElasticsearchSecurityException = (terminateWithNoException == false) && randomBoolean();
+        final boolean withAuthenticateHeader = throwElasticsearchSecurityException && randomBoolean();
+        Exception throwE = new Exception("general authentication error");
+        final String basicScheme = "Basic realm=\"" + XPackField.SECURITY + "\" charset=\"UTF-8\"";
+        String selectedScheme = randomFrom(basicScheme, "Negotiate IOJoj");
+        if (throwElasticsearchSecurityException) {
+            throwE = new ElasticsearchSecurityException("authentication error", RestStatus.UNAUTHORIZED);
+            if (withAuthenticateHeader) {
+                ((ElasticsearchSecurityException) throwE).addHeader("WWW-Authenticate", selectedScheme);
+            }
+        }
+        mockAuthenticate(secondRealm, token, (terminateWithNoException) ? null : throwE, true);
+
+        ElasticsearchSecurityException e =
+            expectThrows(ElasticsearchSecurityException.class, () -> authenticateBlocking("_action", message, null));
+        if (terminateWithNoException) {
+            assertThat(e.getMessage(), is("terminate authc process"));
+            assertThat(e.getHeader("WWW-Authenticate"), contains(basicScheme));
+        } else {
+            if (throwElasticsearchSecurityException) {
+                assertThat(e.getMessage(), is("authentication error"));
+                if (withAuthenticateHeader) {
+                    assertThat(e.getHeader("WWW-Authenticate"), contains(selectedScheme));
+                } else {
+                    assertThat(e.getHeader("WWW-Authenticate"), contains(basicScheme));
+                }
+            } else {
+                assertThat(e.getMessage(), is("error attempting to authenticate request"));
+                assertThat(e.getHeader("WWW-Authenticate"), contains(basicScheme));
+            }
+        }
+        verify(auditTrail).authenticationFailed(secondRealm.name(), token, "_action", message);
+        verify(auditTrail).authenticationFailed(token, "_action", message);
+        verifyNoMoreInteractions(auditTrail);
+    }
+
     public void testRealmAuthenticateThrowingException() throws Exception {
         AuthenticationToken token = mock(AuthenticationToken.class);
         when(secondRealm.token(threadContext)).thenReturn(token);
@@ -998,6 +1041,19 @@ private void mockAuthenticate(Realm realm, AuthenticationToken token, User user)
         }).when(realm).authenticate(eq(token), any(ActionListener.class));
     }
 
+    @SuppressWarnings("unchecked")
+    private void mockAuthenticate(Realm realm, AuthenticationToken token, Exception e, boolean terminate) {
+        doAnswer((i) -> {
+            ActionListener<AuthenticationResult> listener = (ActionListener<AuthenticationResult>) i.getArguments()[1];
+            if (terminate) {
+                listener.onResponse(AuthenticationResult.terminate("terminate authc process", e));
+            } else {
+                listener.onResponse(AuthenticationResult.unsuccessful("unsuccessful, but continue authc process", e));
+            }
+            return null;
+        }).when(realm).authenticate(eq(token), any(ActionListener.class));
+    }
+
     private Authentication authenticateBlocking(RestRequest restRequest) {
         PlainActionFuture<Authentication> future = new PlainActionFuture<>();
         service.authenticate(restRequest, future);
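Note: the two AuthenticationResult factory methods exercised above differ only in whether the realm chain continues; in isolation (message strings taken from the test, cause is any Exception):

    Exception cause = new Exception("general authentication error");
    // terminate(...) aborts the whole authentication process with this realm's error:
    AuthenticationResult terminated = AuthenticationResult.terminate("terminate authc process", cause);
    // unsuccessful(...) records the failure but lets the next realm in the chain try:
    AuthenticationResult continued = AuthenticationResult.unsuccessful("unsuccessful, but continue authc process", cause);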
org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; @@ -49,4 +54,12 @@ public void testNativeRealmRegistersIndexHealthChangeListener() throws Exception TestEnvironment.newEnvironment(settings), new ThreadContext(settings))); verify(securityIndex, times(2)).addIndexStateListener(isA(BiConsumer.class)); } + + public void testIsStandardType() { + String type = randomFrom(NativeRealmSettings.TYPE, FileRealmSettings.TYPE, LdapRealmSettings.AD_TYPE, LdapRealmSettings.LDAP_TYPE, + PkiRealmSettings.TYPE); + assertThat(InternalRealms.isStandardRealm(type), is(true)); + type = randomFrom(SamlRealmSettings.TYPE, KerberosRealmSettings.TYPE); + assertThat(InternalRealms.isStandardRealm(type), is(false)); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java index ff4c30ddf8c0a..a71f5cb1cf761 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; +import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; import org.elasticsearch.xpack.core.security.authc.ldap.LdapRealmSettings; import org.elasticsearch.xpack.core.security.authc.saml.SamlRealmSettings; import org.elasticsearch.xpack.core.security.user.User; @@ -335,10 +336,11 @@ public void testUnlicensedWithNativeRealmSettingss() throws Exception { } public void testUnlicensedWithNonStandardRealms() throws Exception { - factories.put(SamlRealmSettings.TYPE, config -> new DummyRealm(SamlRealmSettings.TYPE, config)); + final String selectedRealmType = randomFrom(SamlRealmSettings.TYPE, KerberosRealmSettings.TYPE); + factories.put(selectedRealmType, config -> new DummyRealm(selectedRealmType, config)); Settings.Builder builder = Settings.builder() .put("path.home", createTempDir()) - .put("xpack.security.authc.realms.foo.type", SamlRealmSettings.TYPE) + .put("xpack.security.authc.realms.foo.type", selectedRealmType) .put("xpack.security.authc.realms.foo.order", "0"); Settings settings = builder.build(); Environment env = TestEnvironment.newEnvironment(settings); @@ -349,7 +351,7 @@ public void testUnlicensedWithNonStandardRealms() throws Exception { assertThat(realm, is(reservedRealm)); assertThat(iter.hasNext(), is(true)); realm = iter.next(); - assertThat(realm.type(), is(SamlRealmSettings.TYPE)); + assertThat(realm.type(), is(selectedRealmType)); assertThat(iter.hasNext(), is(false)); when(licenseState.allowedRealmType()).thenReturn(AllowedRealmType.DEFAULT); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateToolTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateToolTests.java index c42353ee75232..e94cdff423274 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateToolTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateToolTests.java @@ -47,7 +47,7 @@ protected MigrateUserOrRoles newMigrateUserOrRoles() { 
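The new `testIsStandardType` and the randomized `testUnlicensedWithNonStandardRealms` above both hinge on one partition: native, file, LDAP, Active Directory, and PKI realms are "standard" and stay usable on lower license levels, while SAML and Kerberos realms are not. A minimal sketch of such a classifier, assuming only the JDK (the class is illustrative; the set contents mirror the type constants the tests draw from):

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public final class RealmTypeClassifier {
    // Mirrors NativeRealmSettings.TYPE, FileRealmSettings.TYPE, LdapRealmSettings.LDAP_TYPE,
    // LdapRealmSettings.AD_TYPE and PkiRealmSettings.TYPE in the tests above.
    private static final Set<String> STANDARD_REALM_TYPES =
            new HashSet<>(Arrays.asList("native", "file", "ldap", "active_directory", "pki"));

    /** SAML and Kerberos realm types fall outside this set and need a higher license level. */
    public static boolean isStandardRealm(final String type) {
        return STANDARD_REALM_TYPES.contains(type);
    }
}
```

The production check lives in `InternalRealms.isStandardRealm`; the sketch only mirrors the partition the tests assert.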
@Override protected Environment createEnv(Map settings) throws UserException { Settings.Builder builder = Settings.builder(); - settings.forEach((k,v) -> builder.put(k, v)); + settings.forEach((k, v) -> builder.put(k, v)); return TestEnvironment.newEnvironment(builder.build()); } @@ -75,9 +75,11 @@ public void testRoleJson() throws Exception { String[] runAs = Strings.EMPTY_ARRAY; RoleDescriptor rd = new RoleDescriptor("rolename", cluster, ips, runAs); assertThat(ESNativeRealmMigrateTool.MigrateUserOrRoles.createRoleJson(rd), - equalTo("{\"cluster\":[],\"indices\":[{\"names\":[\"i1\",\"i2\",\"i3\"]," + - "\"privileges\":[\"all\"],\"field_security\":{\"grant\":[\"body\"]}}]," + - "\"run_as\":[],\"metadata\":{},\"type\":\"role\"}")); + equalTo("{\"cluster\":[]," + + "\"indices\":[{\"names\":[\"i1\",\"i2\",\"i3\"]," + + "\"privileges\":[\"all\"],\"field_security\":{\"grant\":[\"body\"]}}]," + + "\"applications\":[]," + + "\"run_as\":[],\"metadata\":{},\"type\":\"role\"}")); } public void testTerminalLogger() throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index 2b5f81cac0175..b7cd23745b932 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.test.NativeRealmIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.SecuritySettingsSourceField; +import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.XPackFeatureSet; import org.elasticsearch.xpack.core.action.XPackUsageRequestBuilder; import org.elasticsearch.xpack.core.action.XPackUsageResponse; @@ -66,6 +67,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.Mockito.mock; /** * Tests for the NativeUsersStore and NativeRolesStore @@ -358,10 +360,11 @@ public void testCreateAndUpdateRole() { assertThat(e.status(), is(RestStatus.FORBIDDEN)); } } else { + final TransportRequest request = mock(TransportRequest.class); GetRolesResponse getRolesResponse = c.prepareGetRoles().names("test_role").get(); assertTrue("test_role does not exist!", getRolesResponse.hasRoles()); assertTrue("any cluster permission should be authorized", - Role.builder(getRolesResponse.roles()[0], null).build().cluster().check("cluster:admin/foo")); + Role.builder(getRolesResponse.roles()[0], null).build().cluster().check("cluster:admin/foo", request)); c.preparePutRole("test_role") .cluster("none") @@ -372,7 +375,7 @@ public void testCreateAndUpdateRole() { assertTrue("test_role does not exist!", getRolesResponse.hasRoles()); assertFalse("no cluster permission should be authorized", - Role.builder(getRolesResponse.roles()[0], null).build().cluster().check("cluster:admin/bar")); + Role.builder(getRolesResponse.roles()[0], null).build().cluster().check("cluster:admin/bar", request)); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationTokenTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationTokenTests.java new file 
mode 100644 index 0000000000000..eaba796b41fe4 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationTokenTests.java @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.security.authc.kerberos; + +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Base64; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; + +public class KerberosAuthenticationTokenTests extends ESTestCase { + + private static final String UNAUTHENTICATED_PRINCIPAL_NAME = ""; + + public void testExtractTokenForValidAuthorizationHeader() throws IOException { + final String base64Token = Base64.getEncoder().encodeToString(randomAlphaOfLength(5).getBytes(StandardCharsets.UTF_8)); + final String negotiate = randomBoolean() ? KerberosAuthenticationToken.NEGOTIATE_AUTH_HEADER_PREFIX : "negotiate "; + final String authzHeader = negotiate + base64Token; + + final KerberosAuthenticationToken kerbAuthnToken = KerberosAuthenticationToken.extractToken(authzHeader); + assertNotNull(kerbAuthnToken); + assertEquals(UNAUTHENTICATED_PRINCIPAL_NAME, kerbAuthnToken.principal()); + assertThat(kerbAuthnToken.credentials(), instanceOf((byte[].class))); + assertArrayEquals(Base64.getDecoder().decode(base64Token), (byte[]) kerbAuthnToken.credentials()); + } + + public void testExtractTokenForInvalidNegotiateAuthorizationHeaderShouldReturnNull() throws IOException { + final String header = randomFrom("negotiate", "Negotiate", " Negotiate", "NegotiateToken", "Basic ", " Custom ", null); + assertNull(KerberosAuthenticationToken.extractToken(header)); + } + + public void testExtractTokenForNegotiateAuthorizationHeaderWithNoTokenShouldThrowException() throws IOException { + final String header = randomFrom(KerberosAuthenticationToken.NEGOTIATE_AUTH_HEADER_PREFIX, "negotiate ", "Negotiate "); + final ElasticsearchSecurityException e = + expectThrows(ElasticsearchSecurityException.class, () -> KerberosAuthenticationToken.extractToken(header)); + assertThat(e.getMessage(), + equalTo("invalid negotiate authentication header value, expected base64 encoded token but value is empty")); + assertContainsAuthenticateHeader(e); + } + + public void testExtractTokenForNotBase64EncodedTokenThrowsException() throws IOException { + final String notBase64Token = "[B@6499375d"; + + final ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, + () -> KerberosAuthenticationToken.extractToken(KerberosAuthenticationToken.NEGOTIATE_AUTH_HEADER_PREFIX + notBase64Token)); + assertThat(e.getMessage(), + equalTo("invalid negotiate authentication header value, could not decode base64 token " + notBase64Token)); + assertContainsAuthenticateHeader(e); + } + + public void testKerberoAuthenticationTokenClearCredentials() { + byte[] inputBytes = 
randomByteArrayOfLength(5); + final String base64Token = Base64.getEncoder().encodeToString(inputBytes); + final KerberosAuthenticationToken kerbAuthnToken = + KerberosAuthenticationToken.extractToken(KerberosAuthenticationToken.NEGOTIATE_AUTH_HEADER_PREFIX + base64Token); + kerbAuthnToken.clearCredentials(); + Arrays.fill(inputBytes, (byte) 0); + assertArrayEquals(inputBytes, (byte[]) kerbAuthnToken.credentials()); + } + + public void testEqualsHashCode() { + final KerberosAuthenticationToken kerberosAuthenticationToken = + new KerberosAuthenticationToken("base64EncodedToken".getBytes(StandardCharsets.UTF_8)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(kerberosAuthenticationToken, (original) -> { + return new KerberosAuthenticationToken((byte[]) original.credentials()); + }); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(kerberosAuthenticationToken, (original) -> { + byte[] originalCreds = (byte[]) original.credentials(); + return new KerberosAuthenticationToken(Arrays.copyOf(originalCreds, originalCreds.length)); + }); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(kerberosAuthenticationToken, (original) -> { + return new KerberosAuthenticationToken((byte[]) original.credentials()); + }, KerberosAuthenticationTokenTests::mutateTestItem); + } + + private static KerberosAuthenticationToken mutateTestItem(KerberosAuthenticationToken original) { + switch (randomIntBetween(0, 2)) { + case 0: + return new KerberosAuthenticationToken(randomByteArrayOfLength(10)); + case 1: + return new KerberosAuthenticationToken("base64EncodedToken".getBytes(StandardCharsets.UTF_16)); + case 2: + return new KerberosAuthenticationToken("[B@6499375d".getBytes(StandardCharsets.UTF_8)); + default: + throw new IllegalArgumentException("unknown option"); + } + } + + private static void assertContainsAuthenticateHeader(ElasticsearchSecurityException e) { + assertThat(e.status(), is(RestStatus.UNAUTHORIZED)); + assertThat(e.getHeaderKeys(), hasSize(1)); + assertThat(e.getHeader(KerberosAuthenticationToken.WWW_AUTHENTICATE), notNullValue()); + assertThat(e.getHeader(KerberosAuthenticationToken.WWW_AUTHENTICATE), contains(KerberosAuthenticationToken.NEGOTIATE_SCHEME_NAME)); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java new file mode 100644 index 0000000000000..5bc239241cf11 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
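The KerberosAuthenticationTokenTests above pin down the parsing contract for an incoming `Authorization: Negotiate <base64-token>` header: a missing or differently-schemed header yields no token, an empty token value is rejected, and malformed base64 is rejected with a message naming the bad value. A standalone sketch of that contract, assuming nothing but the JDK (`NegotiateHeaderParser` and its exception choice are illustrative, not the plugin's actual types):

```java
import java.util.Base64;

public final class NegotiateHeaderParser {
    private static final String NEGOTIATE_PREFIX = "Negotiate ";

    /**
     * Returns decoded token bytes, or null when the header is absent or uses
     * another scheme; throws when the scheme matches but the token is empty or
     * not valid base64. (The real realm raises an unauthorized error carrying a
     * WWW-Authenticate: Negotiate challenge instead of this exception.)
     */
    public static byte[] extractToken(final String authorizationHeader) {
        if (authorizationHeader == null
                || !authorizationHeader.regionMatches(true, 0, NEGOTIATE_PREFIX, 0, NEGOTIATE_PREFIX.length())) {
            return null; // covers null, "negotiate", "NegotiateToken", "Basic ", " Custom "
        }
        final String base64Token = authorizationHeader.substring(NEGOTIATE_PREFIX.length()).trim();
        if (base64Token.isEmpty()) {
            throw new IllegalArgumentException(
                    "invalid negotiate authentication header value, expected base64 encoded token but value is empty");
        }
        try {
            return Base64.getDecoder().decode(base64Token);
        } catch (IllegalArgumentException e) {
            throw new IllegalArgumentException(
                    "invalid negotiate authentication header value, could not decode base64 token " + base64Token, e);
        }
    }
}
```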
+ */ + +package org.elasticsearch.xpack.security.authc.kerberos; + +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; +import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; +import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; +import org.elasticsearch.xpack.core.security.user.User; +import org.ietf.jgss.GSSException; + +import java.nio.file.Path; +import java.util.List; + +import javax.security.auth.login.LoginException; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.AdditionalMatchers.aryEq; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.verify; + +public class KerberosRealmAuthenticateFailedTests extends KerberosRealmTestCase { + + public void testAuthenticateWithNonKerberosAuthenticationToken() { + final KerberosRealm kerberosRealm = createKerberosRealm(randomAlphaOfLength(5)); + + final UsernamePasswordToken usernamePasswordToken = + new UsernamePasswordToken(randomAlphaOfLength(5), new SecureString(new char[] { 'a', 'b', 'c' })); + expectThrows(AssertionError.class, () -> kerberosRealm.authenticate(usernamePasswordToken, PlainActionFuture.newFuture())); + } + + public void testAuthenticateDifferentFailureScenarios() throws LoginException, GSSException { + final String username = randomPrincipalName(); + final String outToken = randomAlphaOfLength(10); + final KerberosRealm kerberosRealm = createKerberosRealm(username); + final boolean validTicket = rarely(); + final boolean throwExceptionForInvalidTicket = validTicket ? false : randomBoolean(); + final boolean throwLoginException = randomBoolean(); + final byte[] decodedTicket = randomByteArrayOfLength(5); + final Path keytabPath = config.env().configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(config.settings())); + final boolean krbDebug = KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE.get(config.settings()); + if (validTicket) { + mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, outToken), null); + } else { + if (throwExceptionForInvalidTicket) { + if (throwLoginException) { + mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, null, new LoginException("Login Exception")); + } else { + mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, null, new GSSException(GSSException.FAILURE)); + } + } else { + mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(null, outToken), null); + } + } + final boolean nullKerberosAuthnToken = rarely(); + final KerberosAuthenticationToken kerberosAuthenticationToken = + nullKerberosAuthnToken ? 
null : new KerberosAuthenticationToken(decodedTicket); + if (nullKerberosAuthnToken) { + expectThrows(AssertionError.class, + () -> kerberosRealm.authenticate(kerberosAuthenticationToken, PlainActionFuture.newFuture())); + } else { + final PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>(); + kerberosRealm.authenticate(kerberosAuthenticationToken, future); + AuthenticationResult result = future.actionGet(); + assertThat(result, is(notNullValue())); + if (validTicket) { + final String expectedUsername = maybeRemoveRealmName(username); + final User expectedUser = new User(expectedUsername, roles.toArray(new String[roles.size()]), null, null, null, true); + assertSuccessAuthenticationResult(expectedUser, outToken, result); + } else { + assertThat(result.getStatus(), is(equalTo(AuthenticationResult.Status.TERMINATE))); + if (throwExceptionForInvalidTicket == false) { + assertThat(result.getException(), is(instanceOf(ElasticsearchSecurityException.class))); + final List<String> wwwAuthnHeader = ((ElasticsearchSecurityException) result.getException()) + .getHeader(KerberosAuthenticationToken.WWW_AUTHENTICATE); + assertThat(wwwAuthnHeader, is(notNullValue())); + assertThat(wwwAuthnHeader.get(0), is(equalTo(KerberosAuthenticationToken.NEGOTIATE_AUTH_HEADER_PREFIX + outToken))); + assertThat(result.getMessage(), is(equalTo("failed to authenticate user, gss context negotiation not complete"))); + } else { + if (throwLoginException) { + assertThat(result.getMessage(), is(equalTo("failed to authenticate user, service login failure"))); + } else { + assertThat(result.getMessage(), is(equalTo("failed to authenticate user, gss context negotiation failure"))); + } + assertThat(result.getException(), is(instanceOf(ElasticsearchSecurityException.class))); + final List<String> wwwAuthnHeader = ((ElasticsearchSecurityException) result.getException()) + .getHeader(KerberosAuthenticationToken.WWW_AUTHENTICATE); + assertThat(wwwAuthnHeader, is(notNullValue())); + assertThat(wwwAuthnHeader.get(0), is(equalTo(KerberosAuthenticationToken.NEGOTIATE_SCHEME_NAME))); + } + } + verify(mockKerberosTicketValidator).validateTicket(aryEq(decodedTicket), eq(keytabPath), eq(krbDebug), + any(ActionListener.class)); + } + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheckTests.java new file mode 100644 index 0000000000000..b6e1df9ddbb79 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmBootstrapCheckTests.java @@ -0,0 +1,114 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License.
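The failure-path assertions in KerberosRealmAuthenticateFailedTests above encode a small SPNEGO rule for the `WWW-Authenticate` challenge on a 401: when ticket validation produced an out-token, it is echoed back as `Negotiate <token>` so the client can continue the handshake; when it did not, the bare `Negotiate` scheme asks the client to restart the exchange. In isolation (a sketch; the class and method names are made up):

```java
public final class NegotiateChallenge {
    private static final String NEGOTIATE_SCHEME_NAME = "Negotiate";

    /** Computes the WWW-Authenticate header value for an unauthorized response. */
    public static String wwwAuthenticateValue(final String outToken) {
        if (outToken != null && outToken.isEmpty() == false) {
            // Handshake made progress: return the token for the next round trip.
            return NEGOTIATE_SCHEME_NAME + " " + outToken;
        }
        // No token available: ask the client to initiate a fresh exchange.
        return NEGOTIATE_SCHEME_NAME;
    }
}
```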
+ */ + +package org.elasticsearch.xpack.security.authc.kerberos; + +import org.elasticsearch.bootstrap.BootstrapCheck; +import org.elasticsearch.bootstrap.BootstrapContext; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.authc.RealmSettings; +import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; +import org.elasticsearch.xpack.core.security.authc.pki.PkiRealmSettings; + +import java.io.IOException; +import java.nio.file.Path; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +public class KerberosRealmBootstrapCheckTests extends ESTestCase { + + public void testBootstrapCheckFailsForMultipleKerberosRealms() throws IOException { + final Path tempDir = createTempDir(); + final Settings settings1 = buildKerberosRealmSettings("kerb1", false, tempDir); + final Settings settings2 = buildKerberosRealmSettings("kerb2", false, tempDir); + final Settings settings3 = realm("pki1", PkiRealmSettings.TYPE, Settings.builder()).build(); + final Settings settings = + Settings.builder().put("path.home", tempDir).put(settings1).put(settings2).put(settings3).build(); + final BootstrapContext context = new BootstrapContext(settings, null); + final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = + new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(settings)); + final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); + assertThat(result, is(notNullValue())); + assertThat(result.isFailure(), is(true)); + assertThat(result.getMessage(), equalTo("multiple [" + KerberosRealmSettings.TYPE + "] realms are configured. 
[" + + KerberosRealmSettings.TYPE + "] can only have one such realm configured")); + } + + public void testBootstrapCheckFailsForMissingKeytabFile() throws IOException { + final Path tempDir = createTempDir(); + final Settings settings = + Settings.builder().put("path.home", tempDir).put(buildKerberosRealmSettings("kerb1", true, tempDir)).build(); + final BootstrapContext context = new BootstrapContext(settings, null); + final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = + new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(settings)); + final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); + assertThat(result, is(notNullValue())); + assertThat(result.isFailure(), is(true)); + assertThat(result.getMessage(), + equalTo("configured service key tab file [" + tempDir.resolve("kerb1.keytab").toString() + "] does not exist")); + } + + public void testBootstrapCheckFailsForMissingRealmType() throws IOException { + final Path tempDir = createTempDir(); + final String name = "kerb1"; + final Settings settings1 = buildKerberosRealmSettings("kerb1", false, tempDir); + final Settings settings2 = realm(name, randomFrom("", " "), Settings.builder()).build(); + final Settings settings = + Settings.builder().put("path.home", tempDir).put(settings1).put(settings2).build(); + final BootstrapContext context = new BootstrapContext(settings, null); + final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = + new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(settings)); + final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); + assertThat(result, is(notNullValue())); + assertThat(result.isFailure(), is(true)); + assertThat(result.getMessage(), equalTo("missing realm type for [" + name + "] realm")); + } + + public void testBootstrapCheckSucceedsForCorrectConfiguration() throws IOException { + final Path tempDir = createTempDir(); + final Settings finalSettings = + Settings.builder().put("path.home", tempDir).put(buildKerberosRealmSettings("kerb1", false, tempDir)).build(); + final BootstrapContext context = new BootstrapContext(finalSettings, null); + final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = + new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(finalSettings)); + final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); + assertThat(result, is(notNullValue())); + assertThat(result.isSuccess(), is(true)); + } + + public void testBootstrapCheckSucceedsForNoKerberosRealms() throws IOException { + final Path tempDir = createTempDir(); + final Settings finalSettings = Settings.builder().put("path.home", tempDir).build(); + final BootstrapContext context = new BootstrapContext(finalSettings, null); + final KerberosRealmBootstrapCheck kerbRealmBootstrapCheck = + new KerberosRealmBootstrapCheck(TestEnvironment.newEnvironment(finalSettings)); + final BootstrapCheck.BootstrapCheckResult result = kerbRealmBootstrapCheck.check(context); + assertThat(result, is(notNullValue())); + assertThat(result.isSuccess(), is(true)); + } + + private Settings buildKerberosRealmSettings(final String name, final boolean missingKeytab, final Path tempDir) throws IOException { + final Settings.Builder builder = Settings.builder(); + if (missingKeytab == false) { + KerberosTestCase.writeKeyTab(tempDir.resolve(name + ".keytab"), null); + } + builder.put(KerberosTestCase.buildKerberosRealmSettings(tempDir.resolve(name + ".keytab").toString())); + return realm(name, 
KerberosRealmSettings.TYPE, builder).build(); + } + + private Settings.Builder realm(final String name, final String type, final Settings.Builder settings) { + final String prefix = RealmSettings.PREFIX + name + "."; + if (type != null) { + settings.put("type", type); + } + final Settings.Builder builder = Settings.builder().put(settings.normalizePrefix(prefix).build(), false); + return builder; + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java new file mode 100644 index 0000000000000..69ebe15c5d74b --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java @@ -0,0 +1,141 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.security.authc.kerberos; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; +import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; +import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.authc.support.UserRoleMapper.UserData; +import org.ietf.jgss.GSSException; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.List; + +import javax.security.auth.login.LoginException; + +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.AdditionalMatchers.aryEq; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +public class KerberosRealmCacheTests extends KerberosRealmTestCase { + + public void testAuthenticateWithCache() throws LoginException, GSSException { + final String username = randomPrincipalName(); + final String outToken = randomAlphaOfLength(10); + final KerberosRealm kerberosRealm = createKerberosRealm(username); + + final String expectedUsername = maybeRemoveRealmName(username); + final User expectedUser = new User(expectedUsername, roles.toArray(new String[roles.size()]), null, null, null, true); + final byte[] decodedTicket = randomByteArrayOfLength(10); + final Path keytabPath = config.env().configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(config.settings())); + final boolean krbDebug = KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE.get(config.settings()); + mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, outToken), null); + final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); + + // authenticate + final User user1 = authenticateAndAssertResult(kerberosRealm, expectedUser, kerberosAuthenticationToken, outToken); + // authenticate with cache + final User user2 = authenticateAndAssertResult(kerberosRealm, expectedUser, kerberosAuthenticationToken, outToken); + + 
assertThat(user1, sameInstance(user2)); + verify(mockKerberosTicketValidator, times(2)).validateTicket(aryEq(decodedTicket), eq(keytabPath), eq(krbDebug), + any(ActionListener.class)); + verify(mockNativeRoleMappingStore).refreshRealmOnChange(kerberosRealm); + verify(mockNativeRoleMappingStore).resolveRoles(any(UserData.class), any(ActionListener.class)); + verifyNoMoreInteractions(mockKerberosTicketValidator, mockNativeRoleMappingStore); + } + + public void testCacheInvalidationScenarios() throws LoginException, GSSException { + final String outToken = randomAlphaOfLength(10); + final List userNames = Arrays.asList(randomPrincipalName(), randomPrincipalName()); + final KerberosRealm kerberosRealm = createKerberosRealm(userNames.toArray(new String[0])); + verify(mockNativeRoleMappingStore).refreshRealmOnChange(kerberosRealm); + + final String authNUsername = randomFrom(userNames); + final byte[] decodedTicket = randomByteArrayOfLength(10); + final Path keytabPath = config.env().configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(config.settings())); + final boolean krbDebug = KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE.get(config.settings()); + mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(authNUsername, outToken), null); + final String expectedUsername = maybeRemoveRealmName(authNUsername); + final User expectedUser = new User(expectedUsername, roles.toArray(new String[roles.size()]), null, null, null, true); + + final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); + final User user1 = authenticateAndAssertResult(kerberosRealm, expectedUser, kerberosAuthenticationToken, outToken); + + final String expireThisUser = randomFrom(userNames); + boolean expireAll = randomBoolean(); + if (expireAll) { + kerberosRealm.expireAll(); + } else { + kerberosRealm.expire(maybeRemoveRealmName(expireThisUser)); + } + + final User user2 = authenticateAndAssertResult(kerberosRealm, expectedUser, kerberosAuthenticationToken, outToken); + + if (expireAll || expireThisUser.equals(authNUsername)) { + assertThat(user1, is(not(sameInstance(user2)))); + verify(mockNativeRoleMappingStore, times(2)).resolveRoles(any(UserData.class), any(ActionListener.class)); + } else { + assertThat(user1, sameInstance(user2)); + verify(mockNativeRoleMappingStore).resolveRoles(any(UserData.class), any(ActionListener.class)); + } + verify(mockKerberosTicketValidator, times(2)).validateTicket(aryEq(decodedTicket), eq(keytabPath), eq(krbDebug), + any(ActionListener.class)); + verifyNoMoreInteractions(mockKerberosTicketValidator, mockNativeRoleMappingStore); + } + + public void testAuthenticateWithValidTicketSucessAuthnWithUserDetailsWhenCacheDisabled() + throws LoginException, GSSException, IOException { + // if cache.ttl <= 0 then the cache is disabled + settings = KerberosTestCase.buildKerberosRealmSettings( + KerberosTestCase.writeKeyTab(dir.resolve("key.keytab"), randomAlphaOfLength(4)).toString(), 100, "0m", true, + randomBoolean()); + final String username = randomPrincipalName(); + final String outToken = randomAlphaOfLength(10); + final KerberosRealm kerberosRealm = createKerberosRealm(username); + + final String expectedUsername = maybeRemoveRealmName(username); + final User expectedUser = new User(expectedUsername, roles.toArray(new String[roles.size()]), null, null, null, true); + final byte[] decodedTicket = randomByteArrayOfLength(10); + final Path keytabPath = 
config.env().configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(config.settings())); + final boolean krbDebug = KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE.get(config.settings()); + mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, outToken), null); + final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); + + // authenticate + final User user1 = authenticateAndAssertResult(kerberosRealm, expectedUser, kerberosAuthenticationToken, outToken); + // authenticate when cache has been disabled + final User user2 = authenticateAndAssertResult(kerberosRealm, expectedUser, kerberosAuthenticationToken, outToken); + + assertThat(user1, not(sameInstance(user2))); + verify(mockKerberosTicketValidator, times(2)).validateTicket(aryEq(decodedTicket), eq(keytabPath), eq(krbDebug), + any(ActionListener.class)); + verify(mockNativeRoleMappingStore).refreshRealmOnChange(kerberosRealm); + verify(mockNativeRoleMappingStore, times(2)).resolveRoles(any(UserData.class), any(ActionListener.class)); + verifyNoMoreInteractions(mockKerberosTicketValidator, mockNativeRoleMappingStore); + } + + private User authenticateAndAssertResult(final KerberosRealm kerberosRealm, final User expectedUser, + final KerberosAuthenticationToken kerberosAuthenticationToken, String outToken) { + final PlainActionFuture future = PlainActionFuture.newFuture(); + kerberosRealm.authenticate(kerberosAuthenticationToken, future); + final AuthenticationResult result = future.actionGet(); + assertSuccessAuthenticationResult(expectedUser, outToken, result); + return result.getUser(); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmSettingsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmSettingsTests.java new file mode 100644 index 0000000000000..2e47d03d49d06 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmSettingsTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
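KerberosRealmCacheTests above lean on one convention from the realm's settings: a `cache.ttl` of zero or less disables the principal-to-User cache outright, so every authentication re-runs role resolution, while `expire`/`expireAll` only invalidate entries when a cache exists. A minimal sketch of that convention with plain JDK types (the class is hypothetical and skips real time-based eviction):

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

final class TtlUserCache<K, V> {
    private final Map<K, V> cache; // null means caching is disabled

    TtlUserCache(final long ttlMillis) {
        // ttl <= 0 disables caching entirely, matching the "0m" setting above;
        // a real cache would additionally evict entries once ttlMillis elapses.
        this.cache = ttlMillis <= 0 ? null : new ConcurrentHashMap<>();
    }

    V computeIfAbsent(final K key, final Function<K, V> loader) {
        if (cache == null) {
            return loader.apply(key); // disabled: always reload, never the same instance
        }
        return cache.computeIfAbsent(key, loader);
    }

    void expire(final K key) {
        if (cache != null) {
            cache.remove(key);
        }
    }

    void expireAll() {
        if (cache != null) {
            cache.clear();
        }
    }
}
```

This is why the disabled-cache test above expects two distinct `User` instances for the same ticket.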
+ */ + +package org.elasticsearch.xpack.security.authc.kerberos; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class KerberosRealmSettingsTests extends ESTestCase { + + public void testKerberosRealmSettings() throws IOException { + final Path dir = createTempDir(); + Path configDir = dir.resolve("config"); + if (Files.exists(configDir) == false) { + configDir = Files.createDirectory(configDir); + } + final String keytabPathConfig = "config" + dir.getFileSystem().getSeparator() + "http.keytab"; + KerberosTestCase.writeKeyTab(dir.resolve(keytabPathConfig), null); + final Integer maxUsers = randomInt(); + final String cacheTTL = randomLongBetween(10L, 100L) + "m"; + final boolean enableDebugLogs = randomBoolean(); + final boolean removeRealmName = randomBoolean(); + final Settings settings = KerberosTestCase.buildKerberosRealmSettings(keytabPathConfig, maxUsers, cacheTTL, enableDebugLogs, + removeRealmName); + + assertThat(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(settings), equalTo(keytabPathConfig)); + assertThat(KerberosRealmSettings.CACHE_TTL_SETTING.get(settings), + equalTo(TimeValue.parseTimeValue(cacheTTL, KerberosRealmSettings.CACHE_TTL_SETTING.getKey()))); + assertThat(KerberosRealmSettings.CACHE_MAX_USERS_SETTING.get(settings), equalTo(maxUsers)); + assertThat(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE.get(settings), is(enableDebugLogs)); + assertThat(KerberosRealmSettings.SETTING_REMOVE_REALM_NAME.get(settings), is(removeRealmName)); + } + +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTestCase.java new file mode 100644 index 0000000000000..9c2c6484c82ab --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTestCase.java @@ -0,0 +1,168 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
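KerberosRealmTestCase below stubs asynchronous collaborators with Mockito's `doAnswer`, completing the `ActionListener` argument rather than returning a value; the same pattern appears in its `mockKerberosTicketValidator` and `roleMappingStore` helpers. Reduced to a self-contained form (the `Validator` and `Listener` interfaces are stand-ins for the plugin's types):

```java
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

public class ListenerStubbingSketch {

    interface Listener<T> {
        void onResponse(T response);
        void onFailure(Exception e);
    }

    interface Validator {
        void validateTicket(byte[] ticket, Listener<String> listener);
    }

    @SuppressWarnings("unchecked")
    static Validator stubbedValidator(final String result, final Exception failure) {
        final Validator validator = mock(Validator.class);
        doAnswer(invocation -> {
            // Grab the listener argument and complete it; the stubbed method itself is void.
            final Listener<String> listener = (Listener<String>) invocation.getArguments()[1];
            if (failure != null) {
                listener.onFailure(failure);
            } else {
                listener.onResponse(result);
            }
            return null;
        }).when(validator).validateTicket(any(byte[].class), any(Listener.class));
        return validator;
    }
}
```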
+ */ + +package org.elasticsearch.xpack.security.authc.kerberos; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; +import org.elasticsearch.xpack.core.security.authc.RealmConfig; +import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; +import org.elasticsearch.xpack.core.security.support.Exceptions; +import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; +import org.junit.After; +import org.junit.Before; + +import java.nio.file.Path; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.AdditionalMatchers.aryEq; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +public abstract class KerberosRealmTestCase extends ESTestCase { + + protected Path dir; + protected ThreadPool threadPool; + protected Settings globalSettings; + protected ResourceWatcherService resourceWatcherService; + protected Settings settings; + protected RealmConfig config; + + protected KerberosTicketValidator mockKerberosTicketValidator; + protected NativeRoleMappingStore mockNativeRoleMappingStore; + + protected static final Set<String> roles = Sets.newHashSet("admin", "kibana_user"); + + @Before + public void setup() throws Exception { + threadPool = new TestThreadPool("kerb realm tests"); + resourceWatcherService = new ResourceWatcherService(Settings.EMPTY, threadPool); + dir = createTempDir(); + globalSettings = Settings.builder().put("path.home", dir).build(); + settings = KerberosTestCase.buildKerberosRealmSettings(KerberosTestCase.writeKeyTab(dir.resolve("key.keytab"), "asa").toString(), + 100, "10m", true, randomBoolean()); + } + + @After + public void shutdown() throws InterruptedException { + resourceWatcherService.stop(); + terminate(threadPool); + } + + protected void mockKerberosTicketValidator(final byte[] decodedTicket, final Path keytabPath, final boolean krbDebug, + final Tuple<String, String> value, final Exception e) { + assert value != null || e != null; + doAnswer((i) -> { + ActionListener<Tuple<String, String>> listener = (ActionListener<Tuple<String, String>>) i.getArguments()[3]; + if (e != null) { + listener.onFailure(e); + } else { + listener.onResponse(value); + } + return null; + }).when(mockKerberosTicketValidator).validateTicket(aryEq(decodedTicket), eq(keytabPath), eq(krbDebug), any(ActionListener.class)); + } + + protected void assertSuccessAuthenticationResult(final User
expectedUser, final String outToken, final AuthenticationResult result) { + assertThat(result, is(notNullValue())); + assertThat(result.getStatus(), is(equalTo(AuthenticationResult.Status.SUCCESS))); + assertThat(result.getUser(), is(equalTo(expectedUser))); + final Map<String, List<String>> responseHeaders = threadPool.getThreadContext().getResponseHeaders(); + assertThat(responseHeaders, is(notNullValue())); + assertThat(responseHeaders.get(KerberosAuthenticationToken.WWW_AUTHENTICATE).get(0), + is(equalTo(KerberosAuthenticationToken.NEGOTIATE_AUTH_HEADER_PREFIX + outToken))); + } + + protected KerberosRealm createKerberosRealm(final String... userForRoleMapping) { + config = new RealmConfig("test-kerb-realm", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), + new ThreadContext(globalSettings)); + mockNativeRoleMappingStore = roleMappingStore(Arrays.asList(userForRoleMapping)); + mockKerberosTicketValidator = mock(KerberosTicketValidator.class); + final KerberosRealm kerberosRealm = + new KerberosRealm(config, mockNativeRoleMappingStore, mockKerberosTicketValidator, threadPool, null); + return kerberosRealm; + } + + @SuppressWarnings("unchecked") + protected NativeRoleMappingStore roleMappingStore(final List<String> userNames) { + final List<String> expectedUserNames = userNames.stream().map(this::maybeRemoveRealmName).collect(Collectors.toList()); + final Client mockClient = mock(Client.class); + when(mockClient.threadPool()).thenReturn(threadPool); + when(mockClient.settings()).thenReturn(settings); + + final NativeRoleMappingStore store = new NativeRoleMappingStore(Settings.EMPTY, mockClient, mock(SecurityIndexManager.class)); + final NativeRoleMappingStore roleMapper = spy(store); + + doAnswer(invocation -> { + final UserRoleMapper.UserData userData = (UserRoleMapper.UserData) invocation.getArguments()[0]; + final ActionListener<Set<String>> listener = (ActionListener<Set<String>>) invocation.getArguments()[1]; + if (expectedUserNames.contains(userData.getUsername())) { + listener.onResponse(roles); + } else { + listener.onFailure( + Exceptions.authorizationError("Expected UPN '" + expectedUserNames + "' but was '" + userData.getUsername() + "'")); + } + return null; + }).when(roleMapper).resolveRoles(any(UserRoleMapper.UserData.class), any(ActionListener.class)); + + return roleMapper; + } + + protected String randomPrincipalName() { + final StringBuilder principalName = new StringBuilder(); + principalName.append(randomAlphaOfLength(5)); + final boolean withInstance = randomBoolean(); + if (withInstance) { + principalName.append("/").append(randomAlphaOfLength(5)); + } + principalName.append("@"); + principalName.append(randomAlphaOfLength(5).toUpperCase(Locale.ROOT)); + return principalName.toString(); + } + + /** + * Usually principal names are in the form 'user/instance@REALM'. This method + * removes the '@REALM' part from the principal name if + * {@link KerberosRealmSettings#SETTING_REMOVE_REALM_NAME} is {@code true}; otherwise it + * returns the input string.
+ * + * @param principalName user principal name + * @return username after removal of realm + */ + protected String maybeRemoveRealmName(final String principalName) { + if (KerberosRealmSettings.SETTING_REMOVE_REALM_NAME.get(settings)) { + int foundAtIndex = principalName.indexOf('@'); + if (foundAtIndex > 0) { + return principalName.substring(0, foundAtIndex); + } + } + return principalName; + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java new file mode 100644 index 0000000000000..43536abaf29e1 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.security.authc.kerberos; + +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; +import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; +import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; +import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.authc.support.UserRoleMapper.UserData; +import org.ietf.jgss.GSSException; + +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.util.Arrays; + +import javax.security.auth.login.LoginException; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.AdditionalMatchers.aryEq; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +public class KerberosRealmTests extends KerberosRealmTestCase { + + public void testSupports() { + final KerberosRealm kerberosRealm = createKerberosRealm("test@REALM"); + + final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(randomByteArrayOfLength(5)); + assertThat(kerberosRealm.supports(kerberosAuthenticationToken), is(true)); + final UsernamePasswordToken usernamePasswordToken = + new UsernamePasswordToken(randomAlphaOfLength(5), new SecureString(new char[] { 'a', 'b', 'c' })); + assertThat(kerberosRealm.supports(usernamePasswordToken), is(false)); + } + + public void testAuthenticateWithValidTicketSucessAuthnWithUserDetails() throws LoginException, GSSException { + final String username = randomPrincipalName(); + final KerberosRealm kerberosRealm = createKerberosRealm(username); + final String expectedUsername = maybeRemoveRealmName(username); + final User expectedUser = new User(expectedUsername, roles.toArray(new String[roles.size()]), null, null, null, true); + final byte[] decodedTicket = 
"base64encodedticket".getBytes(StandardCharsets.UTF_8); + final Path keytabPath = config.env().configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(config.settings())); + final boolean krbDebug = KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE.get(config.settings()); + mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>(username, "out-token"), null); + final KerberosAuthenticationToken kerberosAuthenticationToken = new KerberosAuthenticationToken(decodedTicket); + + final PlainActionFuture future = new PlainActionFuture<>(); + kerberosRealm.authenticate(kerberosAuthenticationToken, future); + assertSuccessAuthenticationResult(expectedUser, "out-token", future.actionGet()); + + verify(mockKerberosTicketValidator, times(1)).validateTicket(aryEq(decodedTicket), eq(keytabPath), eq(krbDebug), + any(ActionListener.class)); + verify(mockNativeRoleMappingStore).refreshRealmOnChange(kerberosRealm); + verify(mockNativeRoleMappingStore).resolveRoles(any(UserData.class), any(ActionListener.class)); + verifyNoMoreInteractions(mockKerberosTicketValidator, mockNativeRoleMappingStore); + } + + public void testFailedAuthorization() throws LoginException, GSSException { + final String username = randomPrincipalName(); + final KerberosRealm kerberosRealm = createKerberosRealm(username); + final byte[] decodedTicket = "base64encodedticket".getBytes(StandardCharsets.UTF_8); + final Path keytabPath = config.env().configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(config.settings())); + final boolean krbDebug = KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE.get(config.settings()); + mockKerberosTicketValidator(decodedTicket, keytabPath, krbDebug, new Tuple<>("does-not-exist@REALM", "out-token"), null); + + final PlainActionFuture future = new PlainActionFuture<>(); + kerberosRealm.authenticate(new KerberosAuthenticationToken(decodedTicket), future); + + ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, future::actionGet); + assertThat(e.status(), is(RestStatus.FORBIDDEN)); + assertThat(e.getMessage(), equalTo("Expected UPN '" + Arrays.asList(maybeRemoveRealmName(username)) + "' but was '" + + maybeRemoveRealmName("does-not-exist@REALM") + "'")); + } + + public void testLookupUser() { + final String username = randomPrincipalName(); + final KerberosRealm kerberosRealm = createKerberosRealm(username); + final PlainActionFuture future = new PlainActionFuture<>(); + kerberosRealm.lookupUser(username, future); + assertThat(future.actionGet(), is(nullValue())); + } + +} \ No newline at end of file diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java new file mode 100644 index 0000000000000..891f400c7be60 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTestCase.java @@ -0,0 +1,223 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.security.authc.kerberos; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.io.BufferedWriter; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Set; + +import javax.security.auth.Subject; + +/** + * Base Test class for Kerberos. + *
<p>
    + * Takes care of starting {@link SimpleKdcLdapServer} as Kdc server backed by + * Ldap Server. + *
<p>
+ * Also assists in building principal names, creation of principals and realm + * settings. + */ +public abstract class KerberosTestCase extends ESTestCase { + + protected Settings globalSettings; + protected Settings settings; + protected List<String> serviceUserNames; + protected List<String> clientUserNames; + protected Path workDir = null; + + protected SimpleKdcLdapServer simpleKdcLdapServer; + + private static Locale restoreLocale; + private static Set<String> unsupportedLocaleLanguages; + static { + unsupportedLocaleLanguages = new HashSet<>(); + /* + * Arabic and some other languages have problems due to the handling of + * GeneralizedTime in SimpleKdcServer. For more detail, look at: + * org.apache.kerby.asn1.type.Asn1GeneralizedTime#toBytes() + */ + unsupportedLocaleLanguages.add("ar"); + unsupportedLocaleLanguages.add("ja"); + unsupportedLocaleLanguages.add("th"); + unsupportedLocaleLanguages.add("hi"); + unsupportedLocaleLanguages.add("uz"); + unsupportedLocaleLanguages.add("fa"); + unsupportedLocaleLanguages.add("ks"); + } + + @BeforeClass + public static void setupKerberos() throws Exception { + if (isLocaleUnsupported()) { + Logger logger = Loggers.getLogger(KerberosTestCase.class); + logger.warn("Attempting to run Kerberos test on {} locale, but that breaks SimpleKdcServer. Switching to English.", + Locale.getDefault()); + restoreLocale = Locale.getDefault(); + Locale.setDefault(Locale.ENGLISH); + } + } + + @AfterClass + public static void restoreLocale() throws Exception { + if (restoreLocale != null) { + Locale.setDefault(restoreLocale); + restoreLocale = null; + } + } + + private static boolean isLocaleUnsupported() { + return unsupportedLocaleLanguages.contains(Locale.getDefault().getLanguage()); + } + + @Before + public void startSimpleKdcLdapServer() throws Exception { + workDir = createTempDir(); + globalSettings = Settings.builder().put("path.home", workDir).build(); + + final Path kdcLdiff = getDataPath("/kdc.ldiff"); + simpleKdcLdapServer = new SimpleKdcLdapServer(workDir, "com", "example", kdcLdiff); + + // Create SPNs and UPNs + serviceUserNames = new ArrayList<>(); + Randomness.get().ints(randomIntBetween(1, 6)).forEach((i) -> { + serviceUserNames.add("HTTP/" + randomAlphaOfLength(8)); + }); + final Path ktabPathForService = createPrincipalKeyTab(workDir, serviceUserNames.toArray(new String[0])); + clientUserNames = new ArrayList<>(); + Randomness.get().ints(randomIntBetween(1, 6)).forEach((i) -> { + String clientUserName = "client-" + randomAlphaOfLength(8); + clientUserNames.add(clientUserName); + try { + createPrincipal(clientUserName, "pwd".toCharArray()); + } catch (Exception e) { + throw ExceptionsHelper.convertToRuntime(e); + } + }); + settings = buildKerberosRealmSettings(ktabPathForService.toString()); + } + + @After + public void tearDownMiniKdc() throws IOException, PrivilegedActionException { + simpleKdcLdapServer.stop(); + } + + /** + * Creates principals and exports them to the keytab created in the directory. + * + * @param dir Directory where the key tab would be created. + * @param princNames principal names to be created + * @return {@link Path} to key tab file. + * @throws Exception + */ + protected Path createPrincipalKeyTab(final Path dir, final String... princNames) throws Exception { + final Path path = dir.resolve(randomAlphaOfLength(10) + ".keytab"); + simpleKdcLdapServer.createPrincipal(path, princNames); + return path; + } + + /** + * Creates principal with given name and password.
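The locale guard in KerberosTestCase above works around a real Apache Kerby limitation: `SimpleKdcServer` mishandles `GeneralizedTime` rendering under several non-Latin default locales, so the suite temporarily forces English and restores the original locale afterwards. The save-and-restore pattern in isolation (illustrative class; same language list as above):

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;

public final class LocaleGuard {
    private static final Set<String> UNSUPPORTED_LANGUAGES =
            new HashSet<>(Arrays.asList("ar", "ja", "th", "hi", "uz", "fa", "ks"));

    private static Locale saved; // non-null only while a switch is in effect

    /** Call before the suite: switch to English if the default locale is known to break the KDC. */
    public static void forceEnglishIfUnsupported() {
        if (UNSUPPORTED_LANGUAGES.contains(Locale.getDefault().getLanguage())) {
            saved = Locale.getDefault();
            Locale.setDefault(Locale.ENGLISH);
        }
    }

    /** Call after the suite: undo the switch so other tests see the original locale. */
    public static void restore() {
        if (saved != null) {
            Locale.setDefault(saved);
            saved = null;
        }
    }
}
```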
+ * + * @param principalName Principal name + * @param password Password + * @throws Exception + */ + protected void createPrincipal(final String principalName, final char[] password) throws Exception { + simpleKdcLdapServer.createPrincipal(principalName, new String(password)); + } + + /** + * Appends realm name to user to form principal name + * + * @param user user name + * @return principal name in the form user@REALM + */ + protected String principalName(final String user) { + return user + "@" + simpleKdcLdapServer.getRealm(); + } + + /** + * Invokes Subject.doAs inside a doPrivileged block + * + * @param subject {@link Subject} + * @param action {@link PrivilegedExceptionAction} action for performing inside + * Subject.doAs + * @return Type of value as returned by PrivilegedAction + * @throws PrivilegedActionException + */ + static <T> T doAsWrapper(final Subject subject, final PrivilegedExceptionAction<T> action) throws PrivilegedActionException { + return AccessController.doPrivileged((PrivilegedExceptionAction<T>) () -> Subject.doAs(subject, action)); + } + + /** + * Write content to provided keytab file. + * + * @param keytabPath {@link Path} to keytab file. + * @param content Content for keytab + * @return key tab path + * @throws IOException + */ + public static Path writeKeyTab(final Path keytabPath, final String content) throws IOException { + try (BufferedWriter bufferedWriter = Files.newBufferedWriter(keytabPath, StandardCharsets.US_ASCII)) { + bufferedWriter.write(Strings.isNullOrEmpty(content) ? "test-content" : content); + } + return keytabPath; + } + + /** + * Build kerberos realm settings with default config and given keytab + * + * @param keytabPath key tab file path + * @return {@link Settings} for kerberos realm + */ + public static Settings buildKerberosRealmSettings(final String keytabPath) { + return buildKerberosRealmSettings(keytabPath, 100, "10m", true, false); + } + + /** + * Build kerberos realm settings + * + * @param keytabPath key tab file path + * @param maxUsersInCache max users to be maintained in cache + * @param cacheTTL time to live for cached entries + * @param enableDebugging for krb5 logs + * @param removeRealmName {@code true} if we want to remove realm name from the username of form 'user@REALM' + * @return {@link Settings} for kerberos realm + */ + public static Settings buildKerberosRealmSettings(final String keytabPath, final int maxUsersInCache, final String cacheTTL, + final boolean enableDebugging, final boolean removeRealmName) { + final Settings.Builder builder = Settings.builder().put(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.getKey(), keytabPath) + .put(KerberosRealmSettings.CACHE_MAX_USERS_SETTING.getKey(), maxUsersInCache) + .put(KerberosRealmSettings.CACHE_TTL_SETTING.getKey(), cacheTTL) + .put(KerberosRealmSettings.SETTING_KRB_DEBUG_ENABLE.getKey(), enableDebugging) + .put(KerberosRealmSettings.SETTING_REMOVE_REALM_NAME.getKey(), removeRealmName); + return builder.build(); + } + +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTicketValidatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTicketValidatorTests.java new file mode 100644 index 0000000000000..8f35e0bde4454 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosTicketValidatorTests.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V.
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.security.authc.kerberos;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.support.PlainActionFuture;
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.settings.SecureString;
+import org.elasticsearch.common.util.concurrent.UncategorizedExecutionException;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.env.TestEnvironment;
+import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings;
+import org.ietf.jgss.GSSException;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.security.PrivilegedActionException;
+import java.util.Base64;
+import java.util.concurrent.ExecutionException;
+
+import javax.security.auth.login.LoginException;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+
+public class KerberosTicketValidatorTests extends KerberosTestCase {
+
+    private KerberosTicketValidator kerberosTicketValidator = new KerberosTicketValidator();
+
+    public void testKerbTicketGeneratedForDifferentServerFailsValidation() throws Exception {
+        createPrincipalKeyTab(workDir, "differentServer");
+
+        // Client login and init token preparation
+        final String clientUserName = randomFrom(clientUserNames);
+        try (SpnegoClient spnegoClient =
+                new SpnegoClient(principalName(clientUserName), new SecureString("pwd".toCharArray()), principalName("differentServer"))) {
+            final String base64KerbToken = spnegoClient.getBase64EncodedTokenForSpnegoHeader();
+            assertThat(base64KerbToken, is(notNullValue()));
+
+            final Environment env = TestEnvironment.newEnvironment(globalSettings);
+            final Path keytabPath = env.configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(settings));
+            final PlainActionFuture<Tuple<String, String>> future = new PlainActionFuture<>();
+            kerberosTicketValidator.validateTicket(Base64.getDecoder().decode(base64KerbToken), keytabPath, true, future);
+            final GSSException gssException = expectThrows(GSSException.class, () -> unwrapExpectedExceptionFromFutureAndThrow(future));
+            assertThat(gssException.getMajor(), equalTo(GSSException.FAILURE));
+        }
+    }
+
+    public void testInvalidKerbTicketFailsValidation() throws Exception {
+        final String base64KerbToken = Base64.getEncoder().encodeToString(randomByteArrayOfLength(5));
+
+        final Environment env = TestEnvironment.newEnvironment(globalSettings);
+        final Path keytabPath = env.configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(settings));
+        kerberosTicketValidator.validateTicket(Base64.getDecoder().decode(base64KerbToken), keytabPath, true,
+                new ActionListener<Tuple<String, String>>() {
+                    boolean exceptionHandled = false;
+
+                    @Override
+                    public void onResponse(Tuple<String, String> response) {
+                        fail("expected exception to be thrown of type GSSException");
+                    }
+
+                    @Override
+                    public void onFailure(Exception e) {
+                        assertThat(exceptionHandled, is(false));
+                        assertThat(e, instanceOf(GSSException.class));
+                        assertThat(((GSSException) e).getMajor(), equalTo(GSSException.DEFECTIVE_TOKEN));
+                        exceptionHandled = true;
+                    }
+                });
+    }
+
+    public void testWhenKeyTabWithInvalidContentFailsValidation()
+            throws LoginException, GSSException, IOException, PrivilegedActionException {
+        // Client login and init token preparation
+        final String clientUserName = randomFrom(clientUserNames);
+        try (SpnegoClient spnegoClient = new SpnegoClient(principalName(clientUserName), new SecureString("pwd".toCharArray()),
+                principalName(randomFrom(serviceUserNames)))) {
+            final String base64KerbToken = spnegoClient.getBase64EncodedTokenForSpnegoHeader();
+            assertThat(base64KerbToken, is(notNullValue()));
+
+            final Path ktabPath = writeKeyTab(workDir.resolve("invalid.keytab"), "not - a - valid - key - tab");
+            settings = buildKerberosRealmSettings(ktabPath.toString());
+            final Environment env = TestEnvironment.newEnvironment(globalSettings);
+            final Path keytabPath = env.configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(settings));
+            final PlainActionFuture<Tuple<String, String>> future = new PlainActionFuture<>();
+            kerberosTicketValidator.validateTicket(Base64.getDecoder().decode(base64KerbToken), keytabPath, true, future);
+            final GSSException gssException = expectThrows(GSSException.class, () -> unwrapExpectedExceptionFromFutureAndThrow(future));
+            assertThat(gssException.getMajor(), equalTo(GSSException.FAILURE));
+        }
+    }
+
+    public void testValidKerberosTicket() throws PrivilegedActionException, GSSException, LoginException {
+        // Client login and init token preparation
+        final String clientUserName = randomFrom(clientUserNames);
+        try (SpnegoClient spnegoClient = new SpnegoClient(principalName(clientUserName), new SecureString("pwd".toCharArray()),
+                principalName(randomFrom(serviceUserNames)))) {
+            final String base64KerbToken = spnegoClient.getBase64EncodedTokenForSpnegoHeader();
+            assertThat(base64KerbToken, is(notNullValue()));
+
+            final Environment env = TestEnvironment.newEnvironment(globalSettings);
+            final Path keytabPath = env.configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(settings));
+            final PlainActionFuture<Tuple<String, String>> future = new PlainActionFuture<>();
+            kerberosTicketValidator.validateTicket(Base64.getDecoder().decode(base64KerbToken), keytabPath, true, future);
+            assertThat(future.actionGet(), is(notNullValue()));
+            assertThat(future.actionGet().v1(), equalTo(principalName(clientUserName)));
+            assertThat(future.actionGet().v2(), is(notNullValue()));
+
+            final String outToken = spnegoClient.handleResponse(future.actionGet().v2());
+            assertThat(outToken, is(nullValue()));
+            assertThat(spnegoClient.isEstablished(), is(true));
+        }
+    }
+
+    private void unwrapExpectedExceptionFromFutureAndThrow(PlainActionFuture<Tuple<String, String>> future) throws Throwable {
+        try {
+            future.actionGet();
+        } catch (Throwable t) {
+            Throwable throwThis = t;
+            while (throwThis instanceof UncategorizedExecutionException || throwThis instanceof ExecutionException) {
+                throwThis = throwThis.getCause();
+            }
+            throw throwThis;
+        }
+    }
+}
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java
new file mode 100644
index 0000000000000..426cacb1a034c
--- /dev/null
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java
@@ -0,0 +1,224 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.security.authc.kerberos;
+
+import com.unboundid.ldap.listener.InMemoryDirectoryServer;
+import com.unboundid.ldap.listener.InMemoryDirectoryServerConfig;
+
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.client.KrbConfig;
+import org.apache.kerby.kerberos.kerb.server.KdcConfigKey;
+import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
+import org.apache.kerby.util.NetworkUtil;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.common.SuppressForbidden;
+import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.security.AccessController;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.Locale;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Utility wrapper around Apache {@link SimpleKdcServer} backed by Unboundid
+ * {@link InMemoryDirectoryServer}.
+ * Starts an in-memory LDAP server and then uses it as the backend for the KDC
+ * server.
+ */
+public class SimpleKdcLdapServer {
+    private static final Logger logger = Loggers.getLogger(SimpleKdcLdapServer.class);
+
+    private Path workDir = null;
+    private SimpleKdcServer simpleKdc;
+    private InMemoryDirectoryServer ldapServer;
+
+    // KDC properties
+    private String transport = ESTestCase.randomFrom("TCP", "UDP");
+    private int kdcPort = 0;
+    private String host;
+    private String realm;
+    private boolean krb5DebugBackupConfigValue;
+
+    // LDAP properties
+    private String baseDn;
+    private Path ldiff;
+    private int ldapPort;
+
+    /**
+     * Creates instances of the KDC server and the LDAP backend server, then
+     * initializes and starts them with the provided configuration.
+     * To stop the KDC and LDAP server use {@link #stop()}.
+     *
+     * @param workDir base directory for the server, used to locate kdc.conf,
+     *            backend.conf and kdc.ldiff
+     * @param orgName org name for the base dn
+     * @param domainName domain name for the base dn
+     * @param ldiff path to the LDIF file used to seed the LDAP directory.
+     */
+    public SimpleKdcLdapServer(final Path workDir, final String orgName, final String domainName, final Path ldiff) throws Exception {
+        this.workDir = workDir;
+        this.realm = domainName.toUpperCase(Locale.ROOT) + "." + orgName.toUpperCase(Locale.ROOT);
+        this.baseDn = "dc=" + domainName + ",dc=" + orgName;
+        this.ldiff = ldiff;
+        this.krb5DebugBackupConfigValue = AccessController.doPrivileged(new PrivilegedExceptionAction<Boolean>() {
+            @Override
+            @SuppressForbidden(reason = "set or clear system property krb5 debug in kerberos tests")
+            public Boolean run() throws Exception {
+                boolean oldDebugSetting = Boolean.parseBoolean(System.getProperty("sun.security.krb5.debug"));
+                System.setProperty("sun.security.krb5.debug", Boolean.TRUE.toString());
+                return oldDebugSetting;
+            }
+        });
+
+        AccessController.doPrivileged(new PrivilegedExceptionAction<Void>() {
+            @Override
+            public Void run() throws Exception {
+                init();
+                return null;
+            }
+        });
+        logger.info("SimpleKdcLdapServer started.");
+    }
+
+    @SuppressForbidden(reason = "Uses Apache Kdc which requires usage of java.io.File in order to create a SimpleKdcServer")
+    private void init() throws Exception {
+        // start ldap server
+        createLdapServiceAndStart();
+        // create ldap backend conf
+        createLdapBackendConf();
+        // Kdc Server
+        simpleKdc = new SimpleKdcServer(this.workDir.toFile(), new KrbConfig());
+        prepareKdcServerAndStart();
+    }
+
+    private void createLdapServiceAndStart() throws Exception {
+        InMemoryDirectoryServerConfig config = new InMemoryDirectoryServerConfig(baseDn);
+        config.setSchema(null);
+        ldapServer = new InMemoryDirectoryServer(config);
+        ldapServer.importFromLDIF(true, this.ldiff.toString());
+        ldapServer.startListening();
+        ldapPort = ldapServer.getListenPort();
+    }
+
+    private void createLdapBackendConf() throws IOException {
+        String backendConf = KdcConfigKey.KDC_IDENTITY_BACKEND.getPropertyKey()
+                + " = org.apache.kerby.kerberos.kdc.identitybackend.LdapIdentityBackend\n"
+                + "host=127.0.0.1\n"
+                + "port=" + ldapPort + "\n"
+                + "admin_dn=uid=admin,ou=system," + baseDn + "\n"
+                + "admin_pw=secret\n"
+                + "base_dn=" + baseDn;
+        Files.write(this.workDir.resolve("backend.conf"), backendConf.getBytes(StandardCharsets.UTF_8));
+        assert Files.exists(this.workDir.resolve("backend.conf"));
+    }
+
+    @SuppressForbidden(reason = "Uses Apache Kdc which requires usage of java.io.File in order to create a SimpleKdcServer")
+    private void prepareKdcServerAndStart() throws Exception {
+        // transport
+        simpleKdc.setWorkDir(workDir.toFile());
+        simpleKdc.setKdcHost(host);
+        simpleKdc.setKdcRealm(realm);
+        if (kdcPort == 0) {
+            kdcPort = NetworkUtil.getServerPort();
+        }
+        if (transport != null) {
+            if (transport.trim().equals("TCP")) {
+                simpleKdc.setKdcTcpPort(kdcPort);
+                simpleKdc.setAllowUdp(false);
+            } else if (transport.trim().equals("UDP")) {
+                simpleKdc.setKdcUdpPort(kdcPort);
+                simpleKdc.setAllowTcp(false);
+            } else {
+                throw new IllegalArgumentException("Invalid transport: " + transport);
+            }
+        } else {
+            throw new IllegalArgumentException("Need to set transport!");
+        }
+        final TimeValue minimumTicketLifeTime = new TimeValue(1, TimeUnit.DAYS);
+        final TimeValue maxRenewableLifeTime = new TimeValue(7, TimeUnit.DAYS);
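+        // Lifetimes are handed to the Kerby config in milliseconds; the generous
+        // values keep tickets from expiring while a test is still running.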
+        simpleKdc.getKdcConfig().setLong(KdcConfigKey.MINIMUM_TICKET_LIFETIME, minimumTicketLifeTime.getMillis());
+        simpleKdc.getKdcConfig().setLong(KdcConfigKey.MAXIMUM_RENEWABLE_LIFETIME, maxRenewableLifeTime.getMillis());
+        simpleKdc.init();
+        simpleKdc.start();
+    }
+
+    public String getRealm() {
+        return realm;
+    }
+
+    public int getLdapListenPort() {
+        return ldapPort;
+    }
+
+    public int getKdcPort() {
+        return kdcPort;
+    }
+
+    /**
+     * Creates a principal in the KDC with the specified user and password.
+     *
+     * @param principal principal name, do not include the domain.
+     * @param password password.
+     * @throws Exception thrown if the principal could not be created.
+     */
+    public synchronized void createPrincipal(final String principal, final String password) throws Exception {
+        simpleKdc.createPrincipal(principal, password);
+    }
+
+    /**
+     * Creates multiple principals in the KDC and adds them to a keytab file.
+     *
+     * @param keytabFile keytab file to add the created principals to. If the
+     *            keytab file exists, the principals are appended to it.
+     * @param principals principals to add to the KDC, do not include the domain.
+     * @throws Exception thrown if the principals or the keytab file could not be
+     *             created.
+     */
+    @SuppressForbidden(reason = "Uses Apache Kdc which requires usage of java.io.File in order to create a SimpleKdcServer")
+    public synchronized void createPrincipal(final Path keytabFile, final String... principals) throws Exception {
+        simpleKdc.createPrincipals(principals);
+        for (String principal : principals) {
+            simpleKdc.getKadmin().exportKeytab(keytabFile.toFile(), principal);
+        }
+    }
+
+    /**
+     * Stops the Simple KDC server and the LDAP backend.
+     */
+    public synchronized void stop() throws PrivilegedActionException {
+        AccessController.doPrivileged(new PrivilegedExceptionAction<Void>() {
+
+            @Override
+            @SuppressForbidden(reason = "set or clear system property krb5 debug in kerberos tests")
+            public Void run() throws Exception {
+                if (simpleKdc != null) {
+                    try {
+                        simpleKdc.stop();
+                    } catch (KrbException e) {
+                        throw ExceptionsHelper.convertToRuntime(e);
+                    } finally {
+                        System.setProperty("sun.security.krb5.debug", Boolean.toString(krb5DebugBackupConfigValue));
+                    }
+                }
+
+                if (ldapServer != null) {
+                    ldapServer.shutDown(true);
+                }
+                return null;
+            }
+        });
+        logger.info("SimpleKdcServer stopped.");
+    }
+
+}
\ No newline at end of file
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServerTests.java
new file mode 100644
index 0000000000000..b1c75d957a7c8
--- /dev/null
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServerTests.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.security.authc.kerberos;
+
+import com.unboundid.ldap.sdk.LDAPConnection;
+import com.unboundid.ldap.sdk.SearchResult;
+import com.unboundid.ldap.sdk.SearchScope;
+
+import org.elasticsearch.action.support.PlainActionFuture;
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.settings.SecureString;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.env.TestEnvironment;
+import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings;
+import org.elasticsearch.xpack.security.authc.kerberos.KerberosAuthenticationToken;
+import org.elasticsearch.xpack.security.authc.kerberos.KerberosTicketValidator;
+import org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils;
+import org.ietf.jgss.GSSException;
+
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.security.PrivilegedActionException;
+import java.text.ParseException;
+import java.util.Base64;
+
+import javax.security.auth.login.LoginException;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.notNullValue;
+
+public class SimpleKdcLdapServerTests extends KerberosTestCase {
+
+    public void testPrincipalCreationAndSearchOnLdap() throws Exception {
+        simpleKdcLdapServer.createPrincipal(workDir.resolve("p1p2.keytab"), "p1", "p2");
+        assertTrue(Files.exists(workDir.resolve("p1p2.keytab")));
+        try (LDAPConnection ldapConn =
+                LdapUtils.privilegedConnect(() -> new LDAPConnection("localhost", simpleKdcLdapServer.getLdapListenPort()))) {
+            assertThat(ldapConn.isConnected(), is(true));
+            SearchResult sr = ldapConn.search("dc=example,dc=com", SearchScope.SUB, "(krb5PrincipalName=p1@EXAMPLE.COM)");
+            assertThat(sr.getSearchEntries(), hasSize(1));
+            assertThat(sr.getSearchEntries().get(0).getDN(), equalTo("uid=p1,dc=example,dc=com"));
+        }
+    }
+
+    public void testClientServiceMutualAuthentication() throws PrivilegedActionException, GSSException, LoginException, ParseException {
+        final String serviceUserName = randomFrom(serviceUserNames);
+        // Client login and init token preparation
+        final String clientUserName = randomFrom(clientUserNames);
+        try (SpnegoClient spnegoClient =
+                new SpnegoClient(principalName(clientUserName), new SecureString("pwd".toCharArray()), principalName(serviceUserName))) {
+            final String base64KerbToken = spnegoClient.getBase64EncodedTokenForSpnegoHeader();
+            assertThat(base64KerbToken, is(notNullValue()));
+            final KerberosAuthenticationToken kerbAuthnToken = new KerberosAuthenticationToken(Base64.getDecoder().decode(base64KerbToken));
+
+            // Service Login
+            final Environment env = TestEnvironment.newEnvironment(globalSettings);
+            final Path keytabPath = env.configFile().resolve(KerberosRealmSettings.HTTP_SERVICE_KEYTAB_PATH.get(settings));
+            // Handle Authz header which contains base64 token
+            final PlainActionFuture<Tuple<String, String>> future = new PlainActionFuture<>();
+            new KerberosTicketValidator().validateTicket((byte[]) kerbAuthnToken.credentials(), keytabPath, true, future);
+            assertThat(future.actionGet(), is(notNullValue()));
+            assertThat(future.actionGet().v1(), equalTo(principalName(clientUserName)));
+
+            // Authenticate service on client side.
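+            // A null out-token from handleResponse() means the client has nothing
+            // more to send, i.e. mutual authentication completed successfully.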
+            final String outToken = spnegoClient.handleResponse(future.actionGet().v2());
+            assertThat(outToken, is(nullValue()));
+            assertThat(spnegoClient.isEstablished(), is(true));
+        }
+    }
+}
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java
new file mode 100644
index 0000000000000..1f883b928bd97
--- /dev/null
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoClient.java
@@ -0,0 +1,257 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.security.authc.kerberos;
+
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.common.SuppressForbidden;
+import org.elasticsearch.common.logging.ESLoggerFactory;
+import org.elasticsearch.common.settings.SecureString;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.xpack.security.authc.kerberos.KerberosTicketValidator;
+import org.ietf.jgss.GSSContext;
+import org.ietf.jgss.GSSCredential;
+import org.ietf.jgss.GSSException;
+import org.ietf.jgss.GSSManager;
+import org.ietf.jgss.GSSName;
+
+import java.io.IOException;
+import java.security.AccessController;
+import java.security.Principal;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.Base64;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+import javax.security.auth.Subject;
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.callback.PasswordCallback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.auth.kerberos.KerberosPrincipal;
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import javax.security.auth.login.LoginContext;
+import javax.security.auth.login.LoginException;
+
+/**
+ * This class is used as a SPNEGO client during testing and handles SPNEGO
+ * interactions using GSS context negotiation.
+ * It is not advisable to share a SpnegoClient between threads, as there is no
+ * synchronization in place; internally it depends on {@link GSSContext} for
+ * context negotiation, which maintains sequencing for replay detection.
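+ * A typical exchange: call {@link #getBase64EncodedTokenForSpnegoHeader()} for
+ * the initial token, then feed each server response to
+ * {@link #handleResponse(String)} until {@link #isEstablished()} returns
+ * {@code true}.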
+ * Use {@link #close()} to release and dispose of the {@link LoginContext} and
+ * {@link GSSContext} after usage.
+ */
+class SpnegoClient implements AutoCloseable {
+    private static final Logger LOGGER = ESLoggerFactory.getLogger(SpnegoClient.class);
+
+    public static final String CRED_CONF_NAME = "PasswordConf";
+    private static final String SUN_KRB5_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule";
+    private final GSSManager gssManager = GSSManager.getInstance();
+    private final LoginContext loginContext;
+    private final GSSContext gssContext;
+
+    /**
+     * Creates a SpnegoClient to interact with the given service principal.
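+     * Login is performed with a password-based JAAS {@link Configuration} built
+     * in memory (see {@link PasswordJaasConf}), so no jaas.conf file is needed.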
+     * Use {@link #close()} to logout the {@link LoginContext} and dispose of the
+     * {@link GSSContext} after usage.
+     *
+     * @param userPrincipalName User principal name for login as client
+     * @param password password for client
+     * @param servicePrincipalName Service principal name with whom this client
+     *            interacts.
+     */
+    SpnegoClient(final String userPrincipalName, final SecureString password, final String servicePrincipalName)
+            throws PrivilegedActionException, GSSException {
+        String oldUseSubjectCredsOnlyFlag = null;
+        try {
+            oldUseSubjectCredsOnlyFlag = getAndSetUseSubjectCredsOnlySystemProperty("true");
+            LOGGER.info("SpnegoClient with userPrincipalName : {}", userPrincipalName);
+            final GSSName gssUserPrincipalName = gssManager.createName(userPrincipalName, GSSName.NT_USER_NAME);
+            final GSSName gssServicePrincipalName = gssManager.createName(servicePrincipalName, GSSName.NT_USER_NAME);
+            loginContext = AccessController
+                    .doPrivileged((PrivilegedExceptionAction<LoginContext>) () -> loginUsingPassword(userPrincipalName, password));
+            final GSSCredential userCreds = KerberosTestCase.doAsWrapper(loginContext.getSubject(),
+                    (PrivilegedExceptionAction<GSSCredential>) () -> gssManager.createCredential(gssUserPrincipalName,
+                            GSSCredential.DEFAULT_LIFETIME, KerberosTicketValidator.SPNEGO_OID, GSSCredential.INITIATE_ONLY));
+            gssContext = gssManager.createContext(gssServicePrincipalName.canonicalize(KerberosTicketValidator.SPNEGO_OID),
+                    KerberosTicketValidator.SPNEGO_OID, userCreds, GSSCredential.DEFAULT_LIFETIME);
+            gssContext.requestMutualAuth(true);
+        } catch (PrivilegedActionException pve) {
+            LOGGER.error("privileged action exception, with root cause", pve.getException());
+            throw pve;
+        } finally {
+            getAndSetUseSubjectCredsOnlySystemProperty(oldUseSubjectCredsOnlyFlag);
+        }
+    }
+
+    /**
+     * GSSContext initiator side handling; initiates context establishment and
+     * returns the base64 encoded token to be sent to the server.
+     *
+     * @return Base64 encoded token
+     */
+    String getBase64EncodedTokenForSpnegoHeader() throws PrivilegedActionException {
+        final byte[] outToken = KerberosTestCase.doAsWrapper(loginContext.getSubject(),
+                (PrivilegedExceptionAction<byte[]>) () -> gssContext.initSecContext(new byte[0], 0, 0));
+        return Base64.getEncoder().encodeToString(outToken);
+    }
+
+    /**
+     * Handles the server response and returns a new token, if any, to be sent to
+     * the server.
+     *
+     * @param base64Token inToken received from the server, passed to
+     *            initSecContext for GSS negotiation
+     * @return Base64 encoded token to be sent to the server. May return
+     *         {@code null} if there is nothing more to be sent.
+     */
+    String handleResponse(final String base64Token) throws PrivilegedActionException {
+        if (gssContext.isEstablished()) {
+            throw new IllegalStateException("GSS Context has already been established");
+        }
+        final byte[] token = Base64.getDecoder().decode(base64Token);
+        final byte[] outToken = KerberosTestCase.doAsWrapper(loginContext.getSubject(),
+                (PrivilegedExceptionAction<byte[]>) () -> gssContext.initSecContext(token, 0, token.length));
+        if (outToken == null || outToken.length == 0) {
+            return null;
+        }
+        return Base64.getEncoder().encodeToString(outToken);
+    }
+
+    /**
+     * A SpnegoClient needs to be closed after usage in order to logout from the
+     * {@link LoginContext} and dispose of the {@link GSSContext}.
+     */
+    public void close() throws LoginException, GSSException, PrivilegedActionException {
+        if (loginContext != null) {
+            AccessController.doPrivileged((PrivilegedExceptionAction<Void>) () -> {
+                loginContext.logout();
+                return null;
+            });
+        }
+        if (gssContext != null) {
+            AccessController.doPrivileged((PrivilegedExceptionAction<Void>) () -> {
+                gssContext.dispose();
+                return null;
+            });
+        }
+    }
+
+    /**
+     * @return {@code true} if the GSS security context was established
+     */
+    boolean isEstablished() {
+        return gssContext.isEstablished();
+    }
+
+    /**
+     * Performs authentication using the provided principal name and password for
+     * the client.
+     *
+     * @param principal Principal name
+     * @param password {@link SecureString}
+     * @return authenticated {@link LoginContext} instance. Note: this needs to be
+     *         closed with {@link LoginContext#logout()} after usage.
+     */
+    private static LoginContext loginUsingPassword(final String principal, final SecureString password) throws LoginException {
+        final Set<Principal> principals = Collections.singleton(new KerberosPrincipal(principal));
+
+        final Subject subject = new Subject(false, principals, Collections.emptySet(), Collections.emptySet());
+
+        final Configuration conf = new PasswordJaasConf(principal);
+        final CallbackHandler callback = new KrbCallbackHandler(principal, password);
+        final LoginContext loginContext = new LoginContext(CRED_CONF_NAME, subject, callback, conf);
+        loginContext.login();
+        return loginContext;
+    }
+
+    /**
+     * Usually we would have a JAAS configuration file for login configuration.
+     * As we do not want the options to be customizable, we construct the
+     * configuration in memory instead of using an additional file setting.
+     * Because this is used instead of jaas.conf, it requires a refresh of
+     * {@link Configuration} and appropriate security permissions to do so.
+     */
+    static class PasswordJaasConf extends Configuration {
+        private final String principal;
+
+        PasswordJaasConf(final String principal) {
+            this.principal = principal;
+        }
+
+        @Override
+        public AppConfigurationEntry[] getAppConfigurationEntry(final String name) {
+            final Map<String, String> options = new HashMap<>();
+            options.put("principal", principal);
+            options.put("storeKey", Boolean.TRUE.toString());
+            options.put("isInitiator", Boolean.TRUE.toString());
+            options.put("debug", Boolean.TRUE.toString());
+            // Refresh Krb5 config during tests as the port keeps changing for kdc server
+            options.put("refreshKrb5Config", Boolean.TRUE.toString());
+
+            return new AppConfigurationEntry[] { new AppConfigurationEntry(SUN_KRB5_LOGIN_MODULE,
+                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, Collections.unmodifiableMap(options)) };
+        }
+    }
+
+    /**
+     * JAAS callback handler to provide credentials.
+     */
+    static class KrbCallbackHandler implements CallbackHandler {
+        private final String principal;
+        private final SecureString password;
+
+        KrbCallbackHandler(final String principal, final SecureString password) {
+            this.principal = principal;
+            this.password = password;
+        }
+
+        public void handle(final Callback[] callbacks) throws IOException, UnsupportedCallbackException {
+            for (Callback callback : callbacks) {
+                if (callback instanceof PasswordCallback) {
+                    PasswordCallback pc = (PasswordCallback) callback;
+                    if (pc.getPrompt().contains(principal)) {
+                        pc.setPassword(password.getChars());
+                        break;
+                    }
+                }
+            }
+        }
+    }
+
+    private static String getAndSetUseSubjectCredsOnlySystemProperty(final String value) {
+        String retVal = null;
+        try {
+            retVal = AccessController.doPrivileged(new PrivilegedExceptionAction<String>() {
+
+                @Override
+                @SuppressForbidden(
+                        reason = "For tests where we provide credentials, need to set and reset javax.security.auth.useSubjectCredsOnly")
+                public String run() throws Exception {
+                    String oldValue = System.getProperty("javax.security.auth.useSubjectCredsOnly");
+                    if (value != null) {
+                        System.setProperty("javax.security.auth.useSubjectCredsOnly", value);
+                    }
+                    return oldValue;
+                }
+
+            });
+        } catch (PrivilegedActionException e) {
+            throw ExceptionsHelper.convertToRuntime(e);
+        }
+        return retVal;
+    }
+}
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
index 9e33e145e24fe..7722a9d216632 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
@@ -90,6 +90,8 @@
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportActionProxy;
 import org.elasticsearch.transport.TransportRequest;
+import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesAction;
+import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesRequest;
 import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction;
 import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequest;
 import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequestBuilder;
@@ -113,6 +115,9 @@
 import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl;
 import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache;
 import org.elasticsearch.xpack.core.security.authz.permission.Role;
+import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege;
+import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege;
+import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege;
 import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
 import org.elasticsearch.xpack.core.security.user.AnonymousUser;
 import org.elasticsearch.xpack.core.security.user.ElasticUser;
@@ -122,6 +127,7 @@
 import org.elasticsearch.xpack.security.audit.AuditTrailService;
 import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
 import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore;
+import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore;
 import org.elasticsearch.xpack.sql.action.SqlQueryAction;
 import org.elasticsearch.xpack.sql.action.SqlQueryRequest;
 import org.junit.Before;
@@ -129,12 +135,15 @@
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
+import java.util.function.Predicate;
 
 import static java.util.Arrays.asList;
 import static org.elasticsearch.test.SecurityTestsUtils.assertAuthenticationException;
@@ -174,8 +183,8 @@ public void setup() {
         rolesStore = mock(CompositeRolesStore.class);
         clusterService = mock(ClusterService.class);
         final Settings settings = Settings.builder()
-            .put("search.remote.other_cluster.seeds", "localhost:9999")
-            .build();
+                .put("search.remote.other_cluster.seeds", "localhost:9999")
+                .build();
         final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
         when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
         auditTrail = mock(AuditTrailService.class);
@@ -183,9 +192,20 @@ public void setup() {
         threadPool = mock(ThreadPool.class);
         when(threadPool.getThreadContext()).thenReturn(threadContext);
         final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(settings);
+
+        final NativePrivilegeStore privilegesStore = mock(NativePrivilegeStore.class);
+        doAnswer(i -> {
+                assertThat(i.getArguments().length, equalTo(3));
+                final Object arg2 = i.getArguments()[2];
+                assertThat(arg2, instanceOf(ActionListener.class));
+                ActionListener<Collection<ApplicationPrivilege>> listener = (ActionListener<Collection<ApplicationPrivilege>>) arg2;
+                listener.onResponse(Collections.emptyList());
+                return null;
+            }
+        ).when(privilegesStore).getPrivileges(any(Collection.class), any(Collection.class), any(ActionListener.class));
+
         doAnswer((i) -> {
-            ActionListener<Role> callback =
-                    (ActionListener<Role>) i.getArguments()[2];
+            ActionListener<Role> callback = (ActionListener<Role>) i.getArguments()[2];
             Set<String> names = (Set<String>) i.getArguments()[0];
             assertNotNull(names);
             Set<RoleDescriptor> roleDescriptors = new HashSet<>();
@@ -199,22 +219,23 @@ public void setup() {
             if (roleDescriptors.isEmpty()) {
                 callback.onResponse(Role.EMPTY);
             } else {
-                callback.onResponse(
-                        CompositeRolesStore.buildRoleFromDescriptors(roleDescriptors, fieldPermissionsCache));
+                CompositeRolesStore.buildRoleFromDescriptors(roleDescriptors, fieldPermissionsCache, privilegesStore,
+                    ActionListener.wrap(r -> callback.onResponse(r), callback::onFailure)
+                );
             }
             return Void.TYPE;
}).when(rolesStore).roles(any(Set.class), any(FieldPermissionsCache.class), any(ActionListener.class)); authorizationService = new AuthorizationService(settings, rolesStore, clusterService, - auditTrail, new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(settings)); + auditTrail, new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(settings)); } private void authorize(Authentication authentication, String action, TransportRequest request) { PlainActionFuture future = new PlainActionFuture<>(); AuthorizationUtils.AsyncAuthorizer authorizer = new AuthorizationUtils.AsyncAuthorizer(authentication, future, - (userRoles, runAsRoles) -> { - authorizationService.authorize(authentication, action, request, userRoles, runAsRoles); - future.onResponse(null); - }); + (userRoles, runAsRoles) -> { + authorizationService.authorize(authentication, action, request, userRoles, runAsRoles); + future.onResponse(null); + }); authorizer.authorize(authorizationService); future.actionGet(); } @@ -226,11 +247,11 @@ public void testActionsSystemUserIsAuthorized() { Authentication authentication = createAuthentication(SystemUser.INSTANCE); authorize(authentication, "indices:monitor/whatever", request); verify(auditTrail).accessGranted(authentication, "indices:monitor/whatever", request, - new String[] { SystemUser.ROLE_NAME }); + new String[]{SystemUser.ROLE_NAME}); authentication = createAuthentication(SystemUser.INSTANCE); authorize(authentication, "internal:whatever", request); - verify(auditTrail).accessGranted(authentication, "internal:whatever", request, new String[] { SystemUser.ROLE_NAME }); + verify(auditTrail).accessGranted(authentication, "internal:whatever", request, new String[]{SystemUser.ROLE_NAME}); verifyNoMoreInteractions(auditTrail); } @@ -238,9 +259,9 @@ public void testIndicesActionsAreNotAuthorized() { final TransportRequest request = mock(TransportRequest.class); final Authentication authentication = createAuthentication(SystemUser.INSTANCE); assertThrowsAuthorizationException( - () -> authorize(authentication, "indices:", request), - "indices:", SystemUser.INSTANCE.principal()); - verify(auditTrail).accessDenied(authentication, "indices:", request, new String[] { SystemUser.ROLE_NAME }); + () -> authorize(authentication, "indices:", request), + "indices:", SystemUser.INSTANCE.principal()); + verify(auditTrail).accessDenied(authentication, "indices:", request, new String[]{SystemUser.ROLE_NAME}); verifyNoMoreInteractions(auditTrail); } @@ -248,10 +269,10 @@ public void testClusterAdminActionsAreNotAuthorized() { final TransportRequest request = mock(TransportRequest.class); final Authentication authentication = createAuthentication(SystemUser.INSTANCE); assertThrowsAuthorizationException( - () -> authorize(authentication, "cluster:admin/whatever", request), - "cluster:admin/whatever", SystemUser.INSTANCE.principal()); + () -> authorize(authentication, "cluster:admin/whatever", request), + "cluster:admin/whatever", SystemUser.INSTANCE.principal()); verify(auditTrail).accessDenied(authentication, "cluster:admin/whatever", request, - new String[] { SystemUser.ROLE_NAME }); + new String[]{SystemUser.ROLE_NAME}); verifyNoMoreInteractions(auditTrail); } @@ -259,10 +280,50 @@ public void testClusterAdminSnapshotStatusActionIsNotAuthorized() { final TransportRequest request = mock(TransportRequest.class); final Authentication authentication = createAuthentication(SystemUser.INSTANCE); assertThrowsAuthorizationException( - () -> authorize(authentication, 
"cluster:admin/snapshot/status", request), - "cluster:admin/snapshot/status", SystemUser.INSTANCE.principal()); + () -> authorize(authentication, "cluster:admin/snapshot/status", request), + "cluster:admin/snapshot/status", SystemUser.INSTANCE.principal()); verify(auditTrail).accessDenied(authentication, "cluster:admin/snapshot/status", request, - new String[] { SystemUser.ROLE_NAME }); + new String[]{SystemUser.ROLE_NAME}); + verifyNoMoreInteractions(auditTrail); + } + + public void testAuthorizeUsingConditionalPrivileges() { + final DeletePrivilegesRequest request = new DeletePrivilegesRequest(); + final Authentication authentication = createAuthentication(new User("user1", "role1")); + + final ConditionalClusterPrivilege conditionalClusterPrivilege = Mockito.mock(ConditionalClusterPrivilege.class); + final Predicate requestPredicate = r -> r == request; + Mockito.when(conditionalClusterPrivilege.getRequestPredicate()).thenReturn(requestPredicate); + Mockito.when(conditionalClusterPrivilege.getPrivilege()).thenReturn(ClusterPrivilege.MANAGE_SECURITY); + final ConditionalClusterPrivilege[] conditionalClusterPrivileges = new ConditionalClusterPrivilege[] { + conditionalClusterPrivilege + }; + RoleDescriptor role = new RoleDescriptor("role1", null, null, null, conditionalClusterPrivileges, null, null ,null); + roleMap.put("role1", role); + + authorize(authentication, DeletePrivilegesAction.NAME, request); + verify(auditTrail).accessGranted(authentication, DeletePrivilegesAction.NAME, request, new String[]{role.getName()}); + verifyNoMoreInteractions(auditTrail); + } + + public void testAuthorizationDeniedWhenConditionalPrivilegesDoNotMatch() { + final DeletePrivilegesRequest request = new DeletePrivilegesRequest(); + final Authentication authentication = createAuthentication(new User("user1", "role1")); + + final ConditionalClusterPrivilege conditionalClusterPrivilege = Mockito.mock(ConditionalClusterPrivilege.class); + final Predicate requestPredicate = r -> false; + Mockito.when(conditionalClusterPrivilege.getRequestPredicate()).thenReturn(requestPredicate); + Mockito.when(conditionalClusterPrivilege.getPrivilege()).thenReturn(ClusterPrivilege.MANAGE_SECURITY); + final ConditionalClusterPrivilege[] conditionalClusterPrivileges = new ConditionalClusterPrivilege[] { + conditionalClusterPrivilege + }; + RoleDescriptor role = new RoleDescriptor("role1", null, null, null, conditionalClusterPrivileges, null, null ,null); + roleMap.put("role1", role); + + assertThrowsAuthorizationException( + () -> authorize(authentication, DeletePrivilegesAction.NAME, request), + DeletePrivilegesAction.NAME, "user1"); + verify(auditTrail).accessDenied(authentication, DeletePrivilegesAction.NAME, request, new String[]{role.getName()}); verifyNoMoreInteractions(auditTrail); } @@ -271,8 +332,8 @@ public void testNoRolesCausesDenial() { final Authentication authentication = createAuthentication(new User("test user")); mockEmptyMetaData(); assertThrowsAuthorizationException( - () -> authorize(authentication, "indices:a", request), - "indices:a", "test user"); + () -> authorize(authentication, "indices:a", request), + "indices:a", "test user"); verify(auditTrail).accessDenied(authentication, "indices:a", request, Role.EMPTY.names()); verifyNoMoreInteractions(auditTrail); } @@ -298,8 +359,8 @@ public void testUserWithNoRolesCannotPerformLocalSearch() { final Authentication authentication = createAuthentication(new User("test user")); mockEmptyMetaData(); assertThrowsAuthorizationException( - () -> 
authorize(authentication, SearchAction.NAME, request), - SearchAction.NAME, "test user"); + () -> authorize(authentication, SearchAction.NAME, request), + SearchAction.NAME, "test user"); verify(auditTrail).accessDenied(authentication, SearchAction.NAME, request, Role.EMPTY.names()); verifyNoMoreInteractions(auditTrail); } @@ -314,8 +375,8 @@ public void testUserWithNoRolesCanPerformMultiClusterSearch() { final Authentication authentication = createAuthentication(new User("test user")); mockEmptyMetaData(); assertThrowsAuthorizationException( - () -> authorize(authentication, SearchAction.NAME, request), - SearchAction.NAME, "test user"); + () -> authorize(authentication, SearchAction.NAME, request), + SearchAction.NAME, "test user"); verify(auditTrail).accessDenied(authentication, SearchAction.NAME, request, Role.EMPTY.names()); verifyNoMoreInteractions(auditTrail); } @@ -325,12 +386,11 @@ public void testUserWithNoRolesCannotSql() { Authentication authentication = createAuthentication(new User("test user")); mockEmptyMetaData(); assertThrowsAuthorizationException( - () -> authorize(authentication, SqlQueryAction.NAME, request), - SqlQueryAction.NAME, "test user"); + () -> authorize(authentication, SqlQueryAction.NAME, request), + SqlQueryAction.NAME, "test user"); verify(auditTrail).accessDenied(authentication, SqlQueryAction.NAME, request, Role.EMPTY.names()); verifyNoMoreInteractions(auditTrail); } - /** * Verifies that the behaviour tested in {@link #testUserWithNoRolesCanPerformRemoteSearch} * does not work for requests that are not remote-index-capable. @@ -341,24 +401,24 @@ public void testRemoteIndicesOnlyWorkWithApplicableRequestTypes() { final Authentication authentication = createAuthentication(new User("test user")); mockEmptyMetaData(); assertThrowsAuthorizationException( - () -> authorize(authentication, DeleteIndexAction.NAME, request), - DeleteIndexAction.NAME, "test user"); + () -> authorize(authentication, DeleteIndexAction.NAME, request), + DeleteIndexAction.NAME, "test user"); verify(auditTrail).accessDenied(authentication, DeleteIndexAction.NAME, request, Role.EMPTY.names()); verifyNoMoreInteractions(auditTrail); } public void testUnknownRoleCausesDenial() { Tuple tuple = randomFrom(asList( - new Tuple<>(SearchAction.NAME, new SearchRequest()), - new Tuple<>(IndicesExistsAction.NAME, new IndicesExistsRequest()), - new Tuple<>(SqlQueryAction.NAME, new SqlQueryRequest()))); + new Tuple<>(SearchAction.NAME, new SearchRequest()), + new Tuple<>(IndicesExistsAction.NAME, new IndicesExistsRequest()), + new Tuple<>(SqlQueryAction.NAME, new SqlQueryRequest()))); String action = tuple.v1(); TransportRequest request = tuple.v2(); final Authentication authentication = createAuthentication(new User("test user", "non-existent-role")); mockEmptyMetaData(); assertThrowsAuthorizationException( - () -> authorize(authentication, action, request), - action, "test user"); + () -> authorize(authentication, action, request), + action, "test user"); verify(auditTrail).accessDenied(authentication, action, request, Role.EMPTY.names()); verifyNoMoreInteractions(auditTrail); } @@ -367,22 +427,22 @@ public void testThatNonIndicesAndNonClusterActionIsDenied() { final TransportRequest request = mock(TransportRequest.class); final Authentication authentication = createAuthentication(new User("test user", "a_all")); final RoleDescriptor role = new RoleDescriptor("a_role", null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null); + new 
IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null); roleMap.put("a_all", role); assertThrowsAuthorizationException( - () -> authorize(authentication, "whatever", request), - "whatever", "test user"); - verify(auditTrail).accessDenied(authentication, "whatever", request, new String[] { role.getName() }); + () -> authorize(authentication, "whatever", request), + "whatever", "test user"); + verify(auditTrail).accessDenied(authentication, "whatever", request, new String[]{role.getName()}); verifyNoMoreInteractions(auditTrail); } public void testThatRoleWithNoIndicesIsDenied() { @SuppressWarnings("unchecked") Tuple tuple = randomFrom( - new Tuple<>(SearchAction.NAME, new SearchRequest()), - new Tuple<>(IndicesExistsAction.NAME, new IndicesExistsRequest()), - new Tuple<>(SqlQueryAction.NAME, new SqlQueryRequest())); + new Tuple<>(SearchAction.NAME, new SearchRequest()), + new Tuple<>(IndicesExistsAction.NAME, new IndicesExistsRequest()), + new Tuple<>(SqlQueryAction.NAME, new SqlQueryRequest())); String action = tuple.v1(); TransportRequest request = tuple.v2(); final Authentication authentication = createAuthentication(new User("test user", "no_indices")); @@ -391,9 +451,9 @@ public void testThatRoleWithNoIndicesIsDenied() { mockEmptyMetaData(); assertThrowsAuthorizationException( - () -> authorize(authentication, action, request), - action, "test user"); - verify(auditTrail).accessDenied(authentication, action, request, new String[] { role.getName() }); + () -> authorize(authentication, action, request), + action, "test user"); + verify(auditTrail).accessDenied(authentication, action, request, new String[]{role.getName()}); verifyNoMoreInteractions(auditTrail); } @@ -402,12 +462,12 @@ public void testElasticUserAuthorizedForNonChangePasswordRequestsWhenNotInSetupM final Tuple request = randomCompositeRequest(); authorize(authentication, request.v1(), request.v2()); - verify(auditTrail).accessGranted(authentication, request.v1(), request.v2(), new String[] { ElasticUser.ROLE_NAME }); + verify(auditTrail).accessGranted(authentication, request.v1(), request.v2(), new String[]{ElasticUser.ROLE_NAME}); } public void testSearchAgainstEmptyCluster() { RoleDescriptor role = new RoleDescriptor("a_role", null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null); + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null); final Authentication authentication = createAuthentication(new User("test user", "a_all")); roleMap.put("a_all", role); mockEmptyMetaData(); @@ -415,25 +475,25 @@ public void testSearchAgainstEmptyCluster() { { //ignore_unavailable set to false, user is not authorized for this index nor does it exist SearchRequest searchRequest = new SearchRequest("does_not_exist") - .indicesOptions(IndicesOptions.fromOptions(false, true, - true, false)); + .indicesOptions(IndicesOptions.fromOptions(false, true, + true, false)); assertThrowsAuthorizationException( - () -> authorize(authentication, SearchAction.NAME, searchRequest), - SearchAction.NAME, "test user"); - verify(auditTrail).accessDenied(authentication, SearchAction.NAME, searchRequest, new String[] { role.getName() }); + () -> authorize(authentication, SearchAction.NAME, searchRequest), + SearchAction.NAME, "test user"); + verify(auditTrail).accessDenied(authentication, SearchAction.NAME, searchRequest, new String[]{role.getName()}); verifyNoMoreInteractions(auditTrail); } { //ignore_unavailable and 
allow_no_indices both set to true, user is not authorized for this index nor does it exist SearchRequest searchRequest = new SearchRequest("does_not_exist") - .indicesOptions(IndicesOptions.fromOptions(true, true, true, false)); + .indicesOptions(IndicesOptions.fromOptions(true, true, true, false)); authorize(authentication, SearchAction.NAME, searchRequest); - verify(auditTrail).accessGranted(authentication, SearchAction.NAME, searchRequest, new String[] { role.getName() }); + verify(auditTrail).accessGranted(authentication, SearchAction.NAME, searchRequest, new String[]{role.getName()}); final IndicesAccessControl indicesAccessControl = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); final IndicesAccessControl.IndexAccessControl indexAccessControl = - indicesAccessControl.getIndexPermissions(IndicesAndAliasesResolverField.NO_INDEX_PLACEHOLDER); + indicesAccessControl.getIndexPermissions(IndicesAndAliasesResolverField.NO_INDEX_PLACEHOLDER); assertFalse(indexAccessControl.getFieldPermissions().hasFieldLevelSecurity()); assertNull(indexAccessControl.getQueries()); } @@ -441,40 +501,40 @@ public void testSearchAgainstEmptyCluster() { public void testScrollRelatedRequestsAllowed() { RoleDescriptor role = new RoleDescriptor("a_role", null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null); + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null); final Authentication authentication = createAuthentication(new User("test user", "a_all")); roleMap.put("a_all", role); mockEmptyMetaData(); final ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); authorize(authentication, ClearScrollAction.NAME, clearScrollRequest); - verify(auditTrail).accessGranted(authentication, ClearScrollAction.NAME, clearScrollRequest, new String[] { role.getName() }); + verify(auditTrail).accessGranted(authentication, ClearScrollAction.NAME, clearScrollRequest, new String[]{role.getName()}); final SearchScrollRequest searchScrollRequest = new SearchScrollRequest(); authorize(authentication, SearchScrollAction.NAME, searchScrollRequest); - verify(auditTrail).accessGranted(authentication, SearchScrollAction.NAME, searchScrollRequest, new String[] { role.getName() }); + verify(auditTrail).accessGranted(authentication, SearchScrollAction.NAME, searchScrollRequest, new String[]{role.getName()}); // We have to use a mock request for other Scroll actions as the actual requests are package private to SearchTransportService final TransportRequest request = mock(TransportRequest.class); authorize(authentication, SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME, request); verify(auditTrail).accessGranted(authentication, SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME, request, - new String[] { role.getName() }); + new String[]{role.getName()}); authorize(authentication, SearchTransportService.FETCH_ID_SCROLL_ACTION_NAME, request); verify(auditTrail).accessGranted(authentication, SearchTransportService.FETCH_ID_SCROLL_ACTION_NAME, request, - new String[] { role.getName() }); + new String[]{role.getName()}); authorize(authentication, SearchTransportService.QUERY_FETCH_SCROLL_ACTION_NAME, request); verify(auditTrail).accessGranted(authentication, SearchTransportService.QUERY_FETCH_SCROLL_ACTION_NAME, request, - new String[] { role.getName() }); + new String[]{role.getName()}); authorize(authentication, SearchTransportService.QUERY_SCROLL_ACTION_NAME, request); 
verify(auditTrail).accessGranted(authentication, SearchTransportService.QUERY_SCROLL_ACTION_NAME, request, - new String[] { role.getName() }); + new String[]{role.getName()}); authorize(authentication, SearchTransportService.FREE_CONTEXT_SCROLL_ACTION_NAME, request); verify(auditTrail).accessGranted(authentication, SearchTransportService.FREE_CONTEXT_SCROLL_ACTION_NAME, request, - new String[] { role.getName() }); + new String[]{role.getName()}); verifyNoMoreInteractions(auditTrail); } @@ -482,14 +542,14 @@ public void testAuthorizeIndicesFailures() { TransportRequest request = new GetIndexRequest().indices("b"); ClusterState state = mockEmptyMetaData(); RoleDescriptor role = new RoleDescriptor("a_role", null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null); + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null); final Authentication authentication = createAuthentication(new User("test user", "a_all")); roleMap.put("a_all", role); assertThrowsAuthorizationException( - () -> authorize(authentication, "indices:a", request), - "indices:a", "test user"); - verify(auditTrail).accessDenied(authentication, "indices:a", request, new String[] { role.getName() }); + () -> authorize(authentication, "indices:a", request), + "indices:a", "test user"); + verify(auditTrail).accessDenied(authentication, "indices:a", request, new String[]{role.getName()}); verifyNoMoreInteractions(auditTrail); verify(clusterService, times(1)).state(); verify(state, times(1)).metaData(); @@ -500,14 +560,14 @@ public void testCreateIndexWithAliasWithoutPermissions() { request.alias(new Alias("a2")); ClusterState state = mockEmptyMetaData(); RoleDescriptor role = new RoleDescriptor("a_role", null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null); + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null); final Authentication authentication = createAuthentication(new User("test user", "a_all")); roleMap.put("a_all", role); assertThrowsAuthorizationException( - () -> authorize(authentication, CreateIndexAction.NAME, request), - IndicesAliasesAction.NAME, "test user"); - verify(auditTrail).accessDenied(authentication, IndicesAliasesAction.NAME, request, new String[] { role.getName() }); + () -> authorize(authentication, CreateIndexAction.NAME, request), + IndicesAliasesAction.NAME, "test user"); + verify(auditTrail).accessDenied(authentication, IndicesAliasesAction.NAME, request, new String[]{role.getName()}); verifyNoMoreInteractions(auditTrail); verify(clusterService).state(); verify(state, times(1)).metaData(); @@ -518,13 +578,13 @@ public void testCreateIndexWithAlias() { request.alias(new Alias("a2")); ClusterState state = mockEmptyMetaData(); RoleDescriptor role = new RoleDescriptor("a_all", null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a", "a2").privileges("all").build() }, null); + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a", "a2").privileges("all").build()}, null); final Authentication authentication = createAuthentication(new User("test user", "a_all")); roleMap.put("a_all", role); authorize(authentication, CreateIndexAction.NAME, request); - verify(auditTrail).accessGranted(authentication, CreateIndexAction.NAME, request, new String[] { role.getName() }); + verify(auditTrail).accessGranted(authentication, CreateIndexAction.NAME, request, new String[]{role.getName()}); 
@@ -518,13 +578,13 @@ public void testCreateIndexWithAlias() {
         request.alias(new Alias("a2"));
         ClusterState state = mockEmptyMetaData();
         RoleDescriptor role = new RoleDescriptor("a_all", null,
-            new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a", "a2").privileges("all").build() }, null);
+                new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a", "a2").privileges("all").build()}, null);
         final Authentication authentication = createAuthentication(new User("test user", "a_all"));
         roleMap.put("a_all", role);
         authorize(authentication, CreateIndexAction.NAME, request);
-        verify(auditTrail).accessGranted(authentication, CreateIndexAction.NAME, request, new String[] { role.getName() });
+        verify(auditTrail).accessGranted(authentication, CreateIndexAction.NAME, request, new String[]{role.getName()});
         verifyNoMoreInteractions(auditTrail);
         verify(clusterService).state();
         verify(state, times(1)).metaData();
@@ -536,17 +596,17 @@ public void testDenialForAnonymousUser() {
         Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "a_all").build();
         final AnonymousUser anonymousUser = new AnonymousUser(settings);
         authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail,
-            new DefaultAuthenticationFailureHandler(), threadPool, anonymousUser);
+                new DefaultAuthenticationFailureHandler(), threadPool, anonymousUser);
         RoleDescriptor role = new RoleDescriptor("a_all", null,
-            new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null);
+                new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null);
         roleMap.put("a_all", role);
         final Authentication authentication = createAuthentication(anonymousUser);
         assertThrowsAuthorizationException(
-            () -> authorize(authentication, "indices:a", request),
-            "indices:a", anonymousUser.principal());
-        verify(auditTrail).accessDenied(authentication, "indices:a", request, new String[] { role.getName() });
+                () -> authorize(authentication, "indices:a", request),
+                "indices:a", anonymousUser.principal());
+        verify(auditTrail).accessDenied(authentication, "indices:a", request, new String[]{role.getName()});
         verifyNoMoreInteractions(auditTrail);
         verify(clusterService, times(1)).state();
         verify(state, times(1)).metaData();
@@ -556,21 +616,21 @@ public void testDenialForAnonymousUserAuthorizationExceptionDisabled() {
         TransportRequest request = new GetIndexRequest().indices("b");
         ClusterState state = mockEmptyMetaData();
         Settings settings = Settings.builder()
-            .put(AnonymousUser.ROLES_SETTING.getKey(), "a_all")
-            .put(AuthorizationService.ANONYMOUS_AUTHORIZATION_EXCEPTION_SETTING.getKey(), false)
-            .build();
+                .put(AnonymousUser.ROLES_SETTING.getKey(), "a_all")
+                .put(AuthorizationService.ANONYMOUS_AUTHORIZATION_EXCEPTION_SETTING.getKey(), false)
+                .build();
         final Authentication authentication = createAuthentication(new AnonymousUser(settings));
         authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail,
-            new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(settings));
+                new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(settings));
         RoleDescriptor role = new RoleDescriptor("a_all", null,
-            new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null);
+                new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null);
         roleMap.put("a_all", role);
         final ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class,
-            () -> authorize(authentication, "indices:a", request));
+                () -> authorize(authentication, "indices:a", request));
         assertAuthenticationException(securityException, containsString("action [indices:a] requires authentication"));
-        verify(auditTrail).accessDenied(authentication, "indices:a", request, new String[] { role.getName() });
+        verify(auditTrail).accessDenied(authentication, "indices:a", request, new String[]{role.getName()});
         verifyNoMoreInteractions(auditTrail);
         verify(clusterService, times(1)).state();
         verify(state, times(1)).metaData();
@@ -581,16 +641,16 @@ public void testAuditTrailIsRecordedWhenIndexWildcardThrowsError() {
         TransportRequest request = new GetIndexRequest().indices("not-an-index-*").indicesOptions(options);
         ClusterState state = mockEmptyMetaData();
         RoleDescriptor role = new RoleDescriptor("a_all", null,
-            new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null);
+                new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null);
         final Authentication authentication = createAuthentication(new User("test user", "a_all"));
         roleMap.put("a_all", role);
         final IndexNotFoundException nfe = expectThrows(
-            IndexNotFoundException.class,
-            () -> authorize(authentication, GetIndexAction.NAME, request));
+                IndexNotFoundException.class,
+                () -> authorize(authentication, GetIndexAction.NAME, request));
         assertThat(nfe.getIndex(), is(notNullValue()));
         assertThat(nfe.getIndex().getName(), is("not-an-index-*"));
-        verify(auditTrail).accessDenied(authentication, GetIndexAction.NAME, request, new String[] { role.getName() });
+        verify(auditTrail).accessDenied(authentication, GetIndexAction.NAME, request, new String[]{role.getName()});
         verifyNoMoreInteractions(auditTrail);
         verify(clusterService).state();
         verify(state, times(1)).metaData();
@@ -601,8 +661,8 @@ public void testRunAsRequestWithNoRolesUser() {
         final Authentication authentication = createAuthentication(new User("run as me", null, new User("test user", "admin")));
         assertNotEquals(authentication.getUser().authenticatedUser(), authentication);
         assertThrowsAuthorizationExceptionRunAs(
-            () -> authorize(authentication, "indices:a", request),
-            "indices:a", "test user", "run as me"); // run as [run as me]
+                () -> authorize(authentication, "indices:a", request),
+                "indices:a", "test user", "run as me"); // run as [run as me]
         verify(auditTrail).runAsDenied(authentication, "indices:a", request, Role.EMPTY.names());
         verifyNoMoreInteractions(auditTrail);
     }
@@ -610,67 +670,67 @@ public void testRunAsRequestWithNoRolesUser() {
     public void testRunAsRequestWithoutLookedUpBy() {
         AuthenticateRequest request = new AuthenticateRequest("run as me");
         roleMap.put("can run as", ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR);
-        User user = new User("run as me", Strings.EMPTY_ARRAY, new User("test user", new String[] { "can run as" }));
+        User user = new User("run as me", Strings.EMPTY_ARRAY, new User("test user", new String[]{"can run as"}));
         Authentication authentication = new Authentication(user, new RealmRef("foo", "bar", "baz"), null);
         assertNotEquals(user.authenticatedUser(), user);
         assertThrowsAuthorizationExceptionRunAs(
-            () -> authorize(authentication, AuthenticateAction.NAME, request),
-            AuthenticateAction.NAME, "test user", "run as me"); // run as [run as me]
+                () -> authorize(authentication, AuthenticateAction.NAME, request),
+                AuthenticateAction.NAME, "test user", "run as me"); // run as [run as me]
         verify(auditTrail).runAsDenied(authentication, AuthenticateAction.NAME, request,
-            new String[] { ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName() });
+                new String[]{ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName()});
         verifyNoMoreInteractions(auditTrail);
     }

     public void testRunAsRequestRunningAsUnAllowedUser() {
         TransportRequest request = mock(TransportRequest.class);
-        User user = new User("run as me", new String[] { "doesn't exist" }, new User("test user", "can run as"));
+        User user = new User("run as me", new String[]{"doesn't exist"}, new User("test user", "can run as"));
         assertNotEquals(user.authenticatedUser(), user);
         final Authentication authentication = createAuthentication(user);
         final RoleDescriptor role = new RoleDescriptor("can run as", null,
-            new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() },
-            new String[] { "not the right user" });
+                new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()},
+                new String[]{"not the right user"});
         roleMap.put("can run as", role);
         assertThrowsAuthorizationExceptionRunAs(
-            () -> authorize(authentication, "indices:a", request),
-            "indices:a", "test user", "run as me");
-        verify(auditTrail).runAsDenied(authentication, "indices:a", request, new String[] { role.getName() });
+                () -> authorize(authentication, "indices:a", request),
+                "indices:a", "test user", "run as me");
+        verify(auditTrail).runAsDenied(authentication, "indices:a", request, new String[]{role.getName()});
         verifyNoMoreInteractions(auditTrail);
     }

     public void testRunAsRequestWithRunAsUserWithoutPermission() {
         TransportRequest request = new GetIndexRequest().indices("a");
         User authenticatedUser = new User("test user", "can run as");
-        User user = new User("run as me", new String[] { "b" }, authenticatedUser);
+        User user = new User("run as me", new String[]{"b"}, authenticatedUser);
         assertNotEquals(user.authenticatedUser(), user);
         final Authentication authentication = createAuthentication(user);
         final RoleDescriptor runAsRole = new RoleDescriptor("can run as", null,
-            new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() },
-            new String[] { "run as me" });
+                new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()},
+                new String[]{"run as me"});
         roleMap.put("can run as", runAsRole);
         RoleDescriptor bRole = new RoleDescriptor("b", null,
-            new IndicesPrivileges[] { IndicesPrivileges.builder().indices("b").privileges("all").build() }, null);
+                new IndicesPrivileges[]{IndicesPrivileges.builder().indices("b").privileges("all").build()}, null);
         boolean indexExists = randomBoolean();
         if (indexExists) {
             ClusterState state = mock(ClusterState.class);
             when(clusterService.state()).thenReturn(state);
             when(state.metaData()).thenReturn(MetaData.builder()
-                .put(new IndexMetaData.Builder("a")
-                    .settings(Settings.builder().put("index.version.created", Version.CURRENT).build())
-                    .numberOfShards(1).numberOfReplicas(0).build(), true)
-                .build());
+                    .put(new IndexMetaData.Builder("a")
+                            .settings(Settings.builder().put("index.version.created", Version.CURRENT).build())
+                            .numberOfShards(1).numberOfReplicas(0).build(), true)
+                    .build());
             roleMap.put("b", bRole);
         } else {
             mockEmptyMetaData();
         }
         assertThrowsAuthorizationExceptionRunAs(
-            () -> authorize(authentication, "indices:a", request),
-            "indices:a", "test user", "run as me");
-        verify(auditTrail).runAsGranted(authentication, "indices:a", request, new String[] { runAsRole.getName() });
+                () -> authorize(authentication, "indices:a", request),
+                "indices:a", "test user", "run as me");
+        verify(auditTrail).runAsGranted(authentication, "indices:a", request, new String[]{runAsRole.getName()});
         if (indexExists) {
-            verify(auditTrail).accessDenied(authentication, "indices:a", request, new String[] { bRole.getName() });
+            verify(auditTrail).accessDenied(authentication, "indices:a", request, new String[]{bRole.getName()});
         } else {
             verify(auditTrail).accessDenied(authentication, "indices:a", request, Role.EMPTY.names());
         }
@@ -679,43 +739,43 @@ public void testRunAsRequestWithRunAsUserWithoutPermission() {

     public void testRunAsRequestWithValidPermissions() {
         TransportRequest request = new GetIndexRequest().indices("b");
-        User authenticatedUser = new User("test user", new String[] { "can run as" });
-        User user = new User("run as me", new String[] { "b" }, authenticatedUser);
+        User authenticatedUser = new User("test user", new String[]{"can run as"});
+        User user = new User("run as me", new String[]{"b"}, authenticatedUser);
         assertNotEquals(user.authenticatedUser(), user);
         final Authentication authentication = createAuthentication(user);
         final RoleDescriptor runAsRole = new RoleDescriptor("can run as", null,
-            new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() },
-            new String[] { "run as me" });
+                new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()},
+                new String[]{"run as me"});
         roleMap.put("can run as", runAsRole);
         ClusterState state = mock(ClusterState.class);
         when(clusterService.state()).thenReturn(state);
         when(state.metaData()).thenReturn(MetaData.builder()
-            .put(new IndexMetaData.Builder("b")
-                .settings(Settings.builder().put("index.version.created", Version.CURRENT).build())
-                .numberOfShards(1).numberOfReplicas(0).build(), true)
-            .build());
+                .put(new IndexMetaData.Builder("b")
+                        .settings(Settings.builder().put("index.version.created", Version.CURRENT).build())
+                        .numberOfShards(1).numberOfReplicas(0).build(), true)
+                .build());
         RoleDescriptor bRole = new RoleDescriptor("b", null,
-            new IndicesPrivileges[] { IndicesPrivileges.builder().indices("b").privileges("all").build() }, null);
+                new IndicesPrivileges[]{IndicesPrivileges.builder().indices("b").privileges("all").build()}, null);
         roleMap.put("b", bRole);
         authorize(authentication, "indices:a", request);
-        verify(auditTrail).runAsGranted(authentication, "indices:a", request, new String[] { runAsRole.getName() });
-        verify(auditTrail).accessGranted(authentication, "indices:a", request, new String[] { bRole.getName() });
+        verify(auditTrail).runAsGranted(authentication, "indices:a", request, new String[]{runAsRole.getName()});
+        verify(auditTrail).accessGranted(authentication, "indices:a", request, new String[]{bRole.getName()});
         verifyNoMoreInteractions(auditTrail);
     }
requests.add(new Tuple<>(SearchAction.NAME, new SearchRequest(SECURITY_INDEX_NAME))); requests.add(new Tuple<>(TermVectorsAction.NAME, - new TermVectorsRequest(SECURITY_INDEX_NAME, "type", "id"))); + new TermVectorsRequest(SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(GetAction.NAME, new GetRequest(SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(TermVectorsAction.NAME, - new TermVectorsRequest(SECURITY_INDEX_NAME, "type", "id"))); + new TermVectorsRequest(SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(IndicesAliasesAction.NAME, new IndicesAliasesRequest() - .addAliasAction(AliasActions.add().alias("security_alias").index(SECURITY_INDEX_NAME)))); + .addAliasAction(AliasActions.add().alias("security_alias").index(SECURITY_INDEX_NAME)))); requests.add( - new Tuple<>(UpdateSettingsAction.NAME, new UpdateSettingsRequest().indices(SECURITY_INDEX_NAME))); + new Tuple<>(UpdateSettingsAction.NAME, new UpdateSettingsRequest().indices(SECURITY_INDEX_NAME))); for (Tuple requestTuple : requests) { String action = requestTuple.v1(); TransportRequest request = requestTuple.v2(); assertThrowsAuthorizationException( - () -> authorize(authentication, action, request), - action, "all_access_user"); - verify(auditTrail).accessDenied(authentication, action, request, new String[] { role.getName() }); + () -> authorize(authentication, action, request), + action, "all_access_user"); + verify(auditTrail).accessDenied(authentication, action, request, new String[]{role.getName()}); verifyNoMoreInteractions(auditTrail); } // we should allow waiting for the health of the index or any index if the user has this permission ClusterHealthRequest request = new ClusterHealthRequest(SECURITY_INDEX_NAME); authorize(authentication, ClusterHealthAction.NAME, request); - verify(auditTrail).accessGranted(authentication, ClusterHealthAction.NAME, request, new String[] { role.getName() }); + verify(auditTrail).accessGranted(authentication, ClusterHealthAction.NAME, request, new String[]{role.getName()}); // multiple indices request = new ClusterHealthRequest(SECURITY_INDEX_NAME, "foo", "bar"); authorize(authentication, ClusterHealthAction.NAME, request); - verify(auditTrail).accessGranted(authentication, ClusterHealthAction.NAME, request, new String[] { role.getName() }); + verify(auditTrail).accessGranted(authentication, ClusterHealthAction.NAME, request, new String[]{role.getName()}); verifyNoMoreInteractions(auditTrail); final SearchRequest searchRequest = new SearchRequest("_all"); @@ -763,17 +823,17 @@ public void testNonXPackUserCannotExecuteOperationAgainstSecurityIndex() { } public void testGrantedNonXPackUserCanExecuteMonitoringOperationsAgainstSecurityIndex() { - RoleDescriptor role = new RoleDescriptor("all access", new String[] { "all" }, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("all").build() }, null); + RoleDescriptor role = new RoleDescriptor("all access", new String[]{"all"}, + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("*").privileges("all").build()}, null); final Authentication authentication = createAuthentication(new User("all_access_user", "all_access")); roleMap.put("all_access", role); ClusterState state = mock(ClusterState.class); when(clusterService.state()).thenReturn(state); when(state.metaData()).thenReturn(MetaData.builder() - .put(new IndexMetaData.Builder(SECURITY_INDEX_NAME) - .settings(Settings.builder().put("index.version.created", Version.CURRENT).build()) - 
.numberOfShards(1).numberOfReplicas(0).build(), true) - .build()); + .put(new IndexMetaData.Builder(SECURITY_INDEX_NAME) + .settings(Settings.builder().put("index.version.created", Version.CURRENT).build()) + .numberOfShards(1).numberOfReplicas(0).build(), true) + .build()); List> requests = new ArrayList<>(); requests.add(new Tuple<>(IndicesStatsAction.NAME, new IndicesStatsRequest().indices(SECURITY_INDEX_NAME))); @@ -781,15 +841,15 @@ public void testGrantedNonXPackUserCanExecuteMonitoringOperationsAgainstSecurity requests.add(new Tuple<>(IndicesSegmentsAction.NAME, new IndicesSegmentsRequest().indices(SECURITY_INDEX_NAME))); requests.add(new Tuple<>(GetSettingsAction.NAME, new GetSettingsRequest().indices(SECURITY_INDEX_NAME))); requests.add(new Tuple<>(IndicesShardStoresAction.NAME, - new IndicesShardStoresRequest().indices(SECURITY_INDEX_NAME))); + new IndicesShardStoresRequest().indices(SECURITY_INDEX_NAME))); requests.add(new Tuple<>(UpgradeStatusAction.NAME, - new UpgradeStatusRequest().indices(SECURITY_INDEX_NAME))); + new UpgradeStatusRequest().indices(SECURITY_INDEX_NAME))); for (final Tuple requestTuple : requests) { final String action = requestTuple.v1(); final TransportRequest request = requestTuple.v2(); authorize(authentication, action, request); - verify(auditTrail).accessGranted(authentication, action, request, new String[] { role.getName() }); + verify(auditTrail).accessGranted(authentication, action, request, new String[]{role.getName()}); } } @@ -799,33 +859,33 @@ public void testSuperusersCanExecuteOperationAgainstSecurityIndex() { ClusterState state = mock(ClusterState.class); when(clusterService.state()).thenReturn(state); when(state.metaData()).thenReturn(MetaData.builder() - .put(new IndexMetaData.Builder(SECURITY_INDEX_NAME) - .settings(Settings.builder().put("index.version.created", Version.CURRENT).build()) - .numberOfShards(1).numberOfReplicas(0).build(), true) - .build()); + .put(new IndexMetaData.Builder(SECURITY_INDEX_NAME) + .settings(Settings.builder().put("index.version.created", Version.CURRENT).build()) + .numberOfShards(1).numberOfReplicas(0).build(), true) + .build()); List> requests = new ArrayList<>(); requests.add(new Tuple<>(DeleteAction.NAME, new DeleteRequest(SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(BulkAction.NAME + "[s]", - createBulkShardRequest(SECURITY_INDEX_NAME, DeleteRequest::new))); + createBulkShardRequest(SECURITY_INDEX_NAME, DeleteRequest::new))); requests.add(new Tuple<>(UpdateAction.NAME, new UpdateRequest(SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(IndexAction.NAME, new IndexRequest(SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(BulkAction.NAME + "[s]", - createBulkShardRequest(SECURITY_INDEX_NAME, IndexRequest::new))); + createBulkShardRequest(SECURITY_INDEX_NAME, IndexRequest::new))); requests.add(new Tuple<>(SearchAction.NAME, new SearchRequest(SECURITY_INDEX_NAME))); requests.add(new Tuple<>(TermVectorsAction.NAME, - new TermVectorsRequest(SECURITY_INDEX_NAME, "type", "id"))); + new TermVectorsRequest(SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(GetAction.NAME, new GetRequest(SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(TermVectorsAction.NAME, - new TermVectorsRequest(SECURITY_INDEX_NAME, "type", "id"))); + new TermVectorsRequest(SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(IndicesAliasesAction.NAME, new IndicesAliasesRequest() - 
.addAliasAction(AliasActions.add().alias("security_alias").index(SECURITY_INDEX_NAME)))); + .addAliasAction(AliasActions.add().alias("security_alias").index(SECURITY_INDEX_NAME)))); requests.add(new Tuple<>(ClusterHealthAction.NAME, new ClusterHealthRequest(SECURITY_INDEX_NAME))); requests.add(new Tuple<>(ClusterHealthAction.NAME, - new ClusterHealthRequest(SECURITY_INDEX_NAME, "foo", "bar"))); + new ClusterHealthRequest(SECURITY_INDEX_NAME, "foo", "bar"))); for (final Tuple requestTuple : requests) { final String action = requestTuple.v1(); @@ -843,10 +903,10 @@ public void testSuperusersCanExecuteOperationAgainstSecurityIndexWithWildcard() ClusterState state = mock(ClusterState.class); when(clusterService.state()).thenReturn(state); when(state.metaData()).thenReturn(MetaData.builder() - .put(new IndexMetaData.Builder(SECURITY_INDEX_NAME) - .settings(Settings.builder().put("index.version.created", Version.CURRENT).build()) - .numberOfShards(1).numberOfReplicas(0).build(), true) - .build()); + .put(new IndexMetaData.Builder(SECURITY_INDEX_NAME) + .settings(Settings.builder().put("index.version.created", Version.CURRENT).build()) + .numberOfShards(1).numberOfReplicas(0).build(), true) + .build()); String action = SearchAction.NAME; SearchRequest request = new SearchRequest("_all"); @@ -860,9 +920,9 @@ public void testAnonymousRolesAreAppliedToOtherUsers() { Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "anonymous_user_role").build(); final AnonymousUser anonymousUser = new AnonymousUser(settings); authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail, - new DefaultAuthenticationFailureHandler(), threadPool, anonymousUser); - roleMap.put("anonymous_user_role", new RoleDescriptor("anonymous_user_role", new String[] { "all" }, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null)); + new DefaultAuthenticationFailureHandler(), threadPool, anonymousUser); + roleMap.put("anonymous_user_role", new RoleDescriptor("anonymous_user_role", new String[]{"all"}, + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null)); mockEmptyMetaData(); // sanity check the anonymous user @@ -886,9 +946,9 @@ public void testAnonymousUserEnabledRoleAdded() { Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "anonymous_user_role").build(); final AnonymousUser anonymousUser = new AnonymousUser(settings); authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail, - new DefaultAuthenticationFailureHandler(), threadPool, anonymousUser); - roleMap.put("anonymous_user_role", new RoleDescriptor("anonymous_user_role", new String[] { "all" }, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null)); + new DefaultAuthenticationFailureHandler(), threadPool, anonymousUser); + roleMap.put("anonymous_user_role", new RoleDescriptor("anonymous_user_role", new String[]{"all"}, + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null)); mockEmptyMetaData(); PlainActionFuture rolesFuture = new PlainActionFuture<>(); authorizationService.roles(new User("no role user"), rolesFuture); @@ -905,8 +965,8 @@ public void testCompositeActionsAreImmediatelyRejected() { final RoleDescriptor role = new RoleDescriptor("no_indices", null, null, null); roleMap.put("no_indices", role); assertThrowsAuthorizationException( - () 
-> authorize(authentication, action, request), action, "test user"); - verify(auditTrail).accessDenied(authentication, action, request, new String[] { role.getName() }); + () -> authorize(authentication, action, request), action, "test user"); + verify(auditTrail).accessDenied(authentication, action, request, new String[]{role.getName()}); verifyNoMoreInteractions(auditTrail); } @@ -917,11 +977,11 @@ public void testCompositeActionsIndicesAreNotChecked() { final TransportRequest request = compositeRequest.v2(); final Authentication authentication = createAuthentication(new User("test user", "role")); final RoleDescriptor role = new RoleDescriptor("role", null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices(randomBoolean() ? "a" : "index").privileges("all").build() }, - null); + new IndicesPrivileges[]{IndicesPrivileges.builder().indices(randomBoolean() ? "a" : "index").privileges("all").build()}, + null); roleMap.put("role", role); authorize(authentication, action, request); - verify(auditTrail).accessGranted(authentication, action, request, new String[] { role.getName() }); + verify(auditTrail).accessGranted(authentication, action, request, new String[]{role.getName()}); verifyNoMoreInteractions(auditTrail); } @@ -930,10 +990,10 @@ public void testCompositeActionsMustImplementCompositeIndicesRequest() { TransportRequest request = mock(TransportRequest.class); User user = new User("test user", "role"); roleMap.put("role", new RoleDescriptor("role", null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices(randomBoolean() ? "a" : "index").privileges("all").build() }, - null)); + new IndicesPrivileges[]{IndicesPrivileges.builder().indices(randomBoolean() ? "a" : "index").privileges("all").build()}, + null)); IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, - () -> authorize(createAuthentication(user), action, request)); + () -> authorize(createAuthentication(user), action, request)); assertThat(illegalStateException.getMessage(), containsString("Composite actions must implement CompositeIndicesRequest")); } @@ -970,62 +1030,62 @@ public void testCompositeActionsIndicesAreCheckedAtTheShardLevel() { User userAllowed = new User("userAllowed", "roleAllowed"); roleMap.put("roleAllowed", new RoleDescriptor("roleAllowed", null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("index").privileges("all").build() }, null)); + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("index").privileges("all").build()}, null)); User userDenied = new User("userDenied", "roleDenied"); roleMap.put("roleDenied", new RoleDescriptor("roleDenied", null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null)); + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null)); mockEmptyMetaData(); authorize(createAuthentication(userAllowed), action, request); assertThrowsAuthorizationException( - () -> authorize(createAuthentication(userDenied), action, request), action, "userDenied"); + () -> authorize(createAuthentication(userDenied), action, request), action, "userDenied"); } public void testAuthorizationOfIndividualBulkItems() { final String action = BulkAction.NAME + "[s]"; final BulkItemRequest[] items = { - new BulkItemRequest(1, new DeleteRequest("concrete-index", "doc", "c1")), - new BulkItemRequest(2, new IndexRequest("concrete-index", "doc", "c2")), - new BulkItemRequest(3, new DeleteRequest("alias-1", "doc", "a1a")), - new 
BulkItemRequest(4, new IndexRequest("alias-1", "doc", "a1b")), - new BulkItemRequest(5, new DeleteRequest("alias-2", "doc", "a2a")), - new BulkItemRequest(6, new IndexRequest("alias-2", "doc", "a2b")) + new BulkItemRequest(1, new DeleteRequest("concrete-index", "doc", "c1")), + new BulkItemRequest(2, new IndexRequest("concrete-index", "doc", "c2")), + new BulkItemRequest(3, new DeleteRequest("alias-1", "doc", "a1a")), + new BulkItemRequest(4, new IndexRequest("alias-1", "doc", "a1b")), + new BulkItemRequest(5, new DeleteRequest("alias-2", "doc", "a2a")), + new BulkItemRequest(6, new IndexRequest("alias-2", "doc", "a2b")) }; final ShardId shardId = new ShardId("concrete-index", UUID.randomUUID().toString(), 1); final TransportRequest request = new BulkShardRequest(shardId, WriteRequest.RefreshPolicy.IMMEDIATE, items); final Authentication authentication = createAuthentication(new User("user", "my-role")); - RoleDescriptor role = new RoleDescriptor("my-role", null, new IndicesPrivileges[] { - IndicesPrivileges.builder().indices("concrete-index").privileges("all").build(), - IndicesPrivileges.builder().indices("alias-1").privileges("index").build(), - IndicesPrivileges.builder().indices("alias-2").privileges("delete").build() + RoleDescriptor role = new RoleDescriptor("my-role", null, new IndicesPrivileges[]{ + IndicesPrivileges.builder().indices("concrete-index").privileges("all").build(), + IndicesPrivileges.builder().indices("alias-1").privileges("index").build(), + IndicesPrivileges.builder().indices("alias-2").privileges("delete").build() }, null); roleMap.put("my-role", role); mockEmptyMetaData(); authorize(authentication, action, request); - verify(auditTrail).accessDenied(authentication, DeleteAction.NAME, request, new String[] { role.getName() }); // alias-1 delete - verify(auditTrail).accessDenied(authentication, IndexAction.NAME, request, new String[] { role.getName() }); // alias-2 index - verify(auditTrail).accessGranted(authentication, action, request, new String[] { role.getName() }); // bulk request is allowed + verify(auditTrail).accessDenied(authentication, DeleteAction.NAME, request, new String[]{role.getName()}); // alias-1 delete + verify(auditTrail).accessDenied(authentication, IndexAction.NAME, request, new String[]{role.getName()}); // alias-2 index + verify(auditTrail).accessGranted(authentication, action, request, new String[]{role.getName()}); // bulk request is allowed verifyNoMoreInteractions(auditTrail); } public void testAuthorizationOfIndividualBulkItemsWithDateMath() { final String action = BulkAction.NAME + "[s]"; final BulkItemRequest[] items = { - new BulkItemRequest(1, new IndexRequest("", "doc", "dy1")), - new BulkItemRequest(2, - new DeleteRequest("", "doc", "dy2")), // resolves to same as above - new BulkItemRequest(3, new IndexRequest("", "doc", "dm1")), - new BulkItemRequest(4, - new DeleteRequest("", "doc", "dm2")), // resolves to same as above + new BulkItemRequest(1, new IndexRequest("", "doc", "dy1")), + new BulkItemRequest(2, + new DeleteRequest("", "doc", "dy2")), // resolves to same as above + new BulkItemRequest(3, new IndexRequest("", "doc", "dm1")), + new BulkItemRequest(4, + new DeleteRequest("", "doc", "dm2")), // resolves to same as above }; final ShardId shardId = new ShardId("concrete-index", UUID.randomUUID().toString(), 1); final TransportRequest request = new BulkShardRequest(shardId, WriteRequest.RefreshPolicy.IMMEDIATE, items); final Authentication authentication = createAuthentication(new User("user", "my-role")); final 
RoleDescriptor role = new RoleDescriptor("my-role", null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("datemath-*").privileges("index").build() }, null); + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("datemath-*").privileges("index").build()}, null); roleMap.put("my-role", role); mockEmptyMetaData(); @@ -1033,14 +1093,14 @@ public void testAuthorizationOfIndividualBulkItemsWithDateMath() { // both deletes should fail verify(auditTrail, Mockito.times(2)).accessDenied(authentication, DeleteAction.NAME, request, - new String[] { role.getName() }); + new String[]{role.getName()}); // bulk request is allowed - verify(auditTrail).accessGranted(authentication, action, request, new String[] { role.getName() }); + verify(auditTrail).accessGranted(authentication, action, request, new String[]{role.getName()}); verifyNoMoreInteractions(auditTrail); } private BulkShardRequest createBulkShardRequest(String indexName, TriFunction> req) { - final BulkItemRequest[] items = { new BulkItemRequest(1, req.apply(indexName, "type", "id")) }; + final BulkItemRequest[] items = {new BulkItemRequest(1, req.apply(indexName, "type", "id"))}; return new BulkShardRequest(new ShardId(indexName, UUID.randomUUID().toString(), 1), WriteRequest.RefreshPolicy.IMMEDIATE, items); } @@ -1049,37 +1109,37 @@ public void testSameUserPermission() { final User user = new User("joe"); final boolean changePasswordRequest = randomBoolean(); final TransportRequest request = changePasswordRequest ? - new ChangePasswordRequestBuilder(mock(Client.class)).username(user.principal()).request() : - new AuthenticateRequestBuilder(mock(Client.class)).username(user.principal()).request(); + new ChangePasswordRequestBuilder(mock(Client.class)).username(user.principal()).request() : + new AuthenticateRequestBuilder(mock(Client.class)).username(user.principal()).request(); final String action = changePasswordRequest ? ChangePasswordAction.NAME : AuthenticateAction.NAME; final Authentication authentication = mock(Authentication.class); final RealmRef authenticatedBy = mock(RealmRef.class); when(authentication.getUser()).thenReturn(user); when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); when(authenticatedBy.getType()) - .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : - randomAlphaOfLengthBetween(4, 12)); + .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : + randomAlphaOfLengthBetween(4, 12)); assertThat(request, instanceOf(UserRequest.class)); assertTrue(AuthorizationService.checkSameUserPermissions(action, request, authentication)); } public void testSameUserPermissionDoesNotAllowNonMatchingUsername() { - final User authUser = new User("admin", new String[] { "bar" }); + final User authUser = new User("admin", new String[]{"bar"}); final User user = new User("joe", null, authUser); final boolean changePasswordRequest = randomBoolean(); final String username = randomFrom("", "joe" + randomAlphaOfLengthBetween(1, 5), randomAlphaOfLengthBetween(3, 10)); final TransportRequest request = changePasswordRequest ? - new ChangePasswordRequestBuilder(mock(Client.class)).username(username).request() : - new AuthenticateRequestBuilder(mock(Client.class)).username(username).request(); + new ChangePasswordRequestBuilder(mock(Client.class)).username(username).request() : + new AuthenticateRequestBuilder(mock(Client.class)).username(username).request(); final String action = changePasswordRequest ? 
ChangePasswordAction.NAME : AuthenticateAction.NAME; final Authentication authentication = mock(Authentication.class); final RealmRef authenticatedBy = mock(RealmRef.class); when(authentication.getUser()).thenReturn(user); when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); when(authenticatedBy.getType()) - .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : - randomAlphaOfLengthBetween(4, 12)); + .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : + randomAlphaOfLengthBetween(4, 12)); assertThat(request, instanceOf(UserRequest.class)); assertFalse(AuthorizationService.checkSameUserPermissions(action, request, authentication)); @@ -1088,8 +1148,8 @@ public void testSameUserPermissionDoesNotAllowNonMatchingUsername() { final RealmRef lookedUpBy = mock(RealmRef.class); when(authentication.getLookedUpBy()).thenReturn(lookedUpBy); when(lookedUpBy.getType()) - .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : - randomAlphaOfLengthBetween(4, 12)); + .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : + randomAlphaOfLengthBetween(4, 12)); // this should still fail since the username is still different assertFalse(AuthorizationService.checkSameUserPermissions(action, request, authentication)); @@ -1105,7 +1165,7 @@ public void testSameUserPermissionDoesNotAllowOtherActions() { final User user = mock(User.class); final TransportRequest request = mock(TransportRequest.class); final String action = randomFrom(PutUserAction.NAME, DeleteUserAction.NAME, ClusterHealthAction.NAME, ClusterStateAction.NAME, - ClusterStatsAction.NAME, GetLicenseAction.NAME); + ClusterStatsAction.NAME, GetLicenseAction.NAME); final Authentication authentication = mock(Authentication.class); final RealmRef authenticatedBy = mock(RealmRef.class); final boolean runAs = randomBoolean(); @@ -1114,20 +1174,20 @@ public void testSameUserPermissionDoesNotAllowOtherActions() { when(user.isRunAs()).thenReturn(runAs); when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); when(authenticatedBy.getType()) - .thenReturn(randomAlphaOfLengthBetween(4, 12)); + .thenReturn(randomAlphaOfLengthBetween(4, 12)); assertFalse(AuthorizationService.checkSameUserPermissions(action, request, authentication)); verifyZeroInteractions(user, request, authentication); } public void testSameUserPermissionRunAsChecksAuthenticatedBy() { - final User authUser = new User("admin", new String[] { "bar" }); + final User authUser = new User("admin", new String[]{"bar"}); final String username = "joe"; final User user = new User(username, null, authUser); final boolean changePasswordRequest = randomBoolean(); final TransportRequest request = changePasswordRequest ? - new ChangePasswordRequestBuilder(mock(Client.class)).username(username).request() : - new AuthenticateRequestBuilder(mock(Client.class)).username(username).request(); + new ChangePasswordRequestBuilder(mock(Client.class)).username(username).request() : + new AuthenticateRequestBuilder(mock(Client.class)).username(username).request(); final String action = changePasswordRequest ? 
ChangePasswordAction.NAME : AuthenticateAction.NAME; final Authentication authentication = mock(Authentication.class); final RealmRef authenticatedBy = mock(RealmRef.class); @@ -1136,8 +1196,8 @@ public void testSameUserPermissionRunAsChecksAuthenticatedBy() { when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); when(authentication.getLookedUpBy()).thenReturn(lookedUpBy); when(lookedUpBy.getType()) - .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : - randomAlphaOfLengthBetween(4, 12)); + .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : + randomAlphaOfLengthBetween(4, 12)); assertTrue(AuthorizationService.checkSameUserPermissions(action, request, authentication)); when(authentication.getUser()).thenReturn(authUser); @@ -1153,8 +1213,8 @@ public void testSameUserPermissionDoesNotAllowChangePasswordForOtherRealms() { when(authentication.getUser()).thenReturn(user); when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); when(authenticatedBy.getType()).thenReturn(randomFrom(LdapRealmSettings.LDAP_TYPE, FileRealmSettings.TYPE, - LdapRealmSettings.AD_TYPE, PkiRealmSettings.TYPE, - randomAlphaOfLengthBetween(4, 12))); + LdapRealmSettings.AD_TYPE, PkiRealmSettings.TYPE, + randomAlphaOfLengthBetween(4, 12))); assertThat(request, instanceOf(UserRequest.class)); assertFalse(AuthorizationService.checkSameUserPermissions(action, request, authentication)); @@ -1165,7 +1225,7 @@ public void testSameUserPermissionDoesNotAllowChangePasswordForOtherRealms() { } public void testSameUserPermissionDoesNotAllowChangePasswordForLookedUpByOtherRealms() { - final User authUser = new User("admin", new String[] { "bar" }); + final User authUser = new User("admin", new String[]{"bar"}); final User user = new User("joe", null, authUser); final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username(user.principal()).request(); final String action = ChangePasswordAction.NAME; @@ -1176,8 +1236,8 @@ public void testSameUserPermissionDoesNotAllowChangePasswordForLookedUpByOtherRe when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); when(authentication.getLookedUpBy()).thenReturn(lookedUpBy); when(lookedUpBy.getType()).thenReturn(randomFrom(LdapRealmSettings.LDAP_TYPE, FileRealmSettings.TYPE, - LdapRealmSettings.AD_TYPE, PkiRealmSettings.TYPE, - randomAlphaOfLengthBetween(4, 12))); + LdapRealmSettings.AD_TYPE, PkiRealmSettings.TYPE, + randomAlphaOfLengthBetween(4, 12))); assertThat(request, instanceOf(UserRequest.class)); assertFalse(AuthorizationService.checkSameUserPermissions(action, request, authentication)); @@ -1223,7 +1283,7 @@ public void testDoesNotUseRolesStoreForXPackUser() { public void testGetRolesForSystemUserThrowsException() { IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> authorizationService.roles(SystemUser.INSTANCE, - null)); + null)); assertEquals("the user [_system] is the system user and we should never try to get its roles", iae.getMessage()); } @@ -1245,9 +1305,9 @@ public void testProxyRequestFailsOnNonProxyAction() { TransportRequest transportRequest = TransportActionProxy.wrapRequest(node, request); User user = new User("test user", "role"); IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, - () -> authorize(createAuthentication(user), "indices:some/action", transportRequest)); + () -> authorize(createAuthentication(user), "indices:some/action", 
transportRequest)); assertThat(illegalStateException.getMessage(), - startsWith("originalRequest is a proxy request for: [org.elasticsearch.transport.TransportRequest$")); + startsWith("originalRequest is a proxy request for: [org.elasticsearch.transport.TransportRequest$")); assertThat(illegalStateException.getMessage(), endsWith("] but action: [indices:some/action] isn't")); } @@ -1255,11 +1315,11 @@ public void testProxyRequestFailsOnNonProxyRequest() { TransportRequest request = TransportRequest.Empty.INSTANCE; User user = new User("test user", "role"); IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, - () -> authorize(createAuthentication(user), TransportActionProxy.getProxyAction("indices:some/action"), request)); + () -> authorize(createAuthentication(user), TransportActionProxy.getProxyAction("indices:some/action"), request)); assertThat(illegalStateException.getMessage(), - startsWith("originalRequest is not a proxy request: [org.elasticsearch.transport.TransportRequest$")); + startsWith("originalRequest is not a proxy request: [org.elasticsearch.transport.TransportRequest$")); assertThat(illegalStateException.getMessage(), - endsWith("] but action: [internal:transport/proxy/indices:some/action] is a proxy action")); + endsWith("] but action: [internal:transport/proxy/indices:some/action] is a proxy action")); } public void testProxyRequestAuthenticationDenied() { @@ -1271,14 +1331,14 @@ public void testProxyRequestAuthenticationDenied() { final RoleDescriptor role = new RoleDescriptor("no_indices", null, null, null); roleMap.put("no_indices", role); assertThrowsAuthorizationException( - () -> authorize(authentication, action, transportRequest), action, "test user"); - verify(auditTrail).accessDenied(authentication, action, proxiedRequest, new String[] { role.getName() }); + () -> authorize(authentication, action, transportRequest), action, "test user"); + verify(auditTrail).accessDenied(authentication, action, proxiedRequest, new String[]{role.getName()}); verifyNoMoreInteractions(auditTrail); } public void testProxyRequestAuthenticationGrantedWithAllPrivileges() { RoleDescriptor role = new RoleDescriptor("a_role", null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null); + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null); final Authentication authentication = createAuthentication(new User("test user", "a_all")); roleMap.put("a_all", role); mockEmptyMetaData(); @@ -1288,12 +1348,12 @@ public void testProxyRequestAuthenticationGrantedWithAllPrivileges() { final TransportRequest transportRequest = TransportActionProxy.wrapRequest(node, clearScrollRequest); final String action = TransportActionProxy.getProxyAction(SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME); authorize(authentication, action, transportRequest); - verify(auditTrail).accessGranted(authentication, action, clearScrollRequest, new String[] { role.getName() }); + verify(auditTrail).accessGranted(authentication, action, clearScrollRequest, new String[]{role.getName()}); } public void testProxyRequestAuthenticationGranted() { RoleDescriptor role = new RoleDescriptor("a_role", null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("read_cross_cluster").build() }, null); + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("read_cross_cluster").build()}, null); final Authentication authentication = 
createAuthentication(new User("test user", "a_all")); roleMap.put("a_all", role); mockEmptyMetaData(); @@ -1303,13 +1363,13 @@ public void testProxyRequestAuthenticationGranted() { final TransportRequest transportRequest = TransportActionProxy.wrapRequest(node, clearScrollRequest); final String action = TransportActionProxy.getProxyAction(SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME); authorize(authentication, action, transportRequest); - verify(auditTrail).accessGranted(authentication, action, clearScrollRequest, new String[] { role.getName() }); + verify(auditTrail).accessGranted(authentication, action, clearScrollRequest, new String[]{role.getName()}); } public void testProxyRequestAuthenticationDeniedWithReadPrivileges() { final Authentication authentication = createAuthentication(new User("test user", "a_all")); final RoleDescriptor role = new RoleDescriptor("a_role", null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("read").build() }, null); + new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("read").build()}, null); roleMap.put("a_all", role); mockEmptyMetaData(); DiscoveryNode node = new DiscoveryNode("foo", buildNewFakeTransportAddress(), Version.CURRENT); @@ -1317,7 +1377,7 @@ public void testProxyRequestAuthenticationDeniedWithReadPrivileges() { TransportRequest transportRequest = TransportActionProxy.wrapRequest(node, clearScrollRequest); String action = TransportActionProxy.getProxyAction(SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME); assertThrowsAuthorizationException( - () -> authorize(authentication, action, transportRequest), action, "test user"); - verify(auditTrail).accessDenied(authentication, action, clearScrollRequest, new String[] { role.getName() }); + () -> authorize(authentication, action, transportRequest), action, "test user"); + verify(auditTrail).accessDenied(authentication, action, clearScrollRequest, new String[]{role.getName()}); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java index 1d0e5c179a9cd..c48ac4568989b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java @@ -7,6 +7,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -24,6 +25,7 @@ import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.List; +import java.util.Set; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -57,8 +59,10 @@ public void testAuthorizedIndicesUserWithSomeRoles() { .putAlias(new AliasMetaData.Builder("ba").build()) .build(), true) .build(); - Role roles = CompositeRolesStore.buildRoleFromDescriptors(Sets.newHashSet(aStarRole, bRole), - new FieldPermissionsCache(Settings.EMPTY)); + final PlainActionFuture future = new PlainActionFuture<>(); + final Set descriptors = Sets.newHashSet(aStarRole, bRole); + CompositeRolesStore.buildRoleFromDescriptors(descriptors, new FieldPermissionsCache(Settings.EMPTY), null, future); + Role roles = 
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java
index bd5acdec818ec..eed3297661a47 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java
@@ -174,8 +174,9 @@ public void setup() {
             if (roleDescriptors.isEmpty()) {
                 callback.onResponse(Role.EMPTY);
             } else {
-                callback.onResponse(
-                    CompositeRolesStore.buildRoleFromDescriptors(roleDescriptors, fieldPermissionsCache));
+                CompositeRolesStore.buildRoleFromDescriptors(roleDescriptors, fieldPermissionsCache, null,
+                        ActionListener.wrap(r -> callback.onResponse(r), callback::onFailure)
+                );
             }
             return Void.TYPE;
         }).when(rolesStore).roles(any(Set.class), any(FieldPermissionsCache.class), any(ActionListener.class));
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java
index 9d34382d566fb..07686838ad0e2 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java
@@ -10,18 +10,34 @@
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.ByteBufferStreamInput;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.XPackClientPlugin;
 import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
+import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges;
+import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege;
 import org.elasticsearch.xpack.core.security.support.MetadataUtils;
+import org.hamcrest.Matchers;

+import java.util.Arrays;
 import java.util.Collections;
+import java.util.LinkedHashSet;
 import java.util.Map;

 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.hamcrest.Matchers.arrayContaining;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.emptyArray;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.nullValue;
 import static org.hamcrest.core.Is.is;

 public class RoleDescriptorTests extends ESTestCase {
@@ -45,9 +61,26 @@ public void testToString() throws Exception {
                 .query("{\"query\": {\"match_all\": {}}}")
                 .build()
         };
-        RoleDescriptor descriptor = new RoleDescriptor("test", new String[] { "all", "none" }, groups, new String[] { "sudo" });
-        assertThat(descriptor.toString(), is("Role[name=test, cluster=[all,none], indicesPrivileges=[IndicesPrivileges[indices=[i1,i2], " +
-            "privileges=[read], field_security=[grant=[body,title], except=null], query={\"query\": {\"match_all\": {}}}],]" +
+        final RoleDescriptor.ApplicationResourcePrivileges[] applicationPrivileges = {
+            RoleDescriptor.ApplicationResourcePrivileges.builder()
+                .application("my_app")
+                .privileges("read", "write")
+                .resources("*")
+                .build()
+        };
+
+        final ConditionalClusterPrivilege[] conditionalClusterPrivileges = new ConditionalClusterPrivilege[]{
+            new ConditionalClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02")))
+        };
+
+        RoleDescriptor descriptor = new RoleDescriptor("test", new String[] { "all", "none" }, groups, applicationPrivileges,
+            conditionalClusterPrivileges, new String[] { "sudo" }, Collections.emptyMap(), Collections.emptyMap());
+
+        assertThat(descriptor.toString(), is("Role[name=test, cluster=[all,none]" +
+            ", global=[{APPLICATION:manage:applications=app01,app02}]" +
+            ", indicesPrivileges=[IndicesPrivileges[indices=[i1,i2], privileges=[read]" +
+            ", field_security=[grant=[body,title], except=null], query={\"query\": {\"match_all\": {}}}],]" +
+            ", applicationPrivileges=[ApplicationResourcePrivileges[application=my_app, privileges=[read,write], resources=[*]],]" +
             ", runAs=[sudo], metadata=[{}]]"));
     }
@@ -60,11 +93,23 @@ public void testToXContent() throws Exception {
                 .query("{\"query\": {\"match_all\": {}}}")
                 .build()
         };
+        final RoleDescriptor.ApplicationResourcePrivileges[] applicationPrivileges = {
+            RoleDescriptor.ApplicationResourcePrivileges.builder()
+                .application("my_app")
+                .privileges("read", "write")
+                .resources("*")
+                .build()
+        };
+        final ConditionalClusterPrivilege[] conditionalClusterPrivileges = {
+            new ConditionalClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02")))
+        };
+
         Map<String, Object> metadata = randomBoolean() ? MetadataUtils.DEFAULT_RESERVED_METADATA : null;
-        RoleDescriptor descriptor = new RoleDescriptor("test", new String[] { "all", "none" }, groups, new String[] { "sudo" }, metadata);
+        RoleDescriptor descriptor = new RoleDescriptor("test", new String[] { "all", "none" }, groups, applicationPrivileges,
+            conditionalClusterPrivileges, new String[]{ "sudo" }, metadata, Collections.emptyMap());
         XContentBuilder builder = descriptor.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS);
         RoleDescriptor parsed = RoleDescriptor.parse("test", BytesReference.bytes(builder), false, XContentType.JSON);
-        assertEquals(parsed, descriptor);
+        assertThat(parsed, equalTo(descriptor));
     }
@@ -113,6 +158,53 @@ public void testParse() throws Exception {
         assertNotNull(rd.getMetadata());
         assertThat(rd.getMetadata().size(), is(1));
         assertThat(rd.getMetadata().get("foo"), is("bar"));
+
+        q = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"]," +
+            " \"index\": [{\"names\": [\"idx1\",\"idx2\"], \"privileges\": [\"p1\", \"p2\"]}]," +
+            " \"applications\": [" +
+            "     {\"resources\": [\"object-123\",\"object-456\"], \"privileges\":[\"read\", \"delete\"], \"application\":\"app1\"}," +
+            "     {\"resources\": [\"*\"], \"privileges\":[\"admin\"], \"application\":\"app2\" }" +
+            " ]," +
+            " \"global\": { \"application\": { \"manage\": { \"applications\" : [ \"kibana\", \"logstash\" ] } } }" +
+            "}";
+        rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
+        assertThat(rd.getName(), equalTo("test"));
+        assertThat(rd.getClusterPrivileges(), arrayContaining("a", "b"));
+        assertThat(rd.getIndicesPrivileges().length, equalTo(1));
+        assertThat(rd.getIndicesPrivileges()[0].getIndices(), arrayContaining("idx1", "idx2"));
+        assertThat(rd.getRunAs(), arrayContaining("m", "n"));
+        assertThat(rd.getIndicesPrivileges()[0].getQuery(), nullValue());
+        assertThat(rd.getApplicationPrivileges().length, equalTo(2));
+        assertThat(rd.getApplicationPrivileges()[0].getResources(), arrayContaining("object-123", "object-456"));
+        assertThat(rd.getApplicationPrivileges()[0].getPrivileges(), arrayContaining("read", "delete"));
+        assertThat(rd.getApplicationPrivileges()[0].getApplication(), equalTo("app1"));
+        assertThat(rd.getApplicationPrivileges()[1].getResources(), arrayContaining("*"));
+        assertThat(rd.getApplicationPrivileges()[1].getPrivileges(), arrayContaining("admin"));
+        assertThat(rd.getApplicationPrivileges()[1].getApplication(), equalTo("app2"));
+        assertThat(rd.getConditionalClusterPrivileges(), Matchers.arrayWithSize(1));
+
+        final ConditionalClusterPrivilege conditionalPrivilege = rd.getConditionalClusterPrivileges()[0];
+        assertThat(conditionalPrivilege.getCategory(), equalTo(ConditionalClusterPrivilege.Category.APPLICATION));
+        assertThat(conditionalPrivilege, instanceOf(ConditionalClusterPrivileges.ManageApplicationPrivileges.class));
+        assertThat(((ConditionalClusterPrivileges.ManageApplicationPrivileges) conditionalPrivilege).getApplicationNames(),
+            containsInAnyOrder("kibana", "logstash"));
+
+        q = "{\"applications\": [{\"application\": \"myapp\", \"resources\": [\"*\"], \"privileges\": [\"login\" ]}] }";
+        rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
+        assertThat(rd.getName(), equalTo("test"));
+        assertThat(rd.getClusterPrivileges(), emptyArray());
+        assertThat(rd.getIndicesPrivileges(), emptyArray());
+        assertThat(rd.getApplicationPrivileges().length, equalTo(1));
+        assertThat(rd.getApplicationPrivileges()[0].getResources(), arrayContaining("*"));
+        assertThat(rd.getApplicationPrivileges()[0].getPrivileges(), arrayContaining("login"));
+        assertThat(rd.getApplicationPrivileges()[0].getApplication(), equalTo("myapp"));
+        assertThat(rd.getConditionalClusterPrivileges(), Matchers.arrayWithSize(0));
+
+        final String badJson
+            = "{\"applications\":[{\"not_supported\": true, \"resources\": [\"*\"], \"privileges\": [\"my-app:login\" ]}] }";
+        final IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
+            () -> RoleDescriptor.parse("test", new BytesArray(badJson), false, XContentType.JSON));
+        assertThat(ex.getMessage(), containsString("not_supported"));
     }

     public void testSerialization() throws Exception {
@@ -125,11 +217,24 @@ public void testSerialization() throws Exception {
                 .query("{\"query\": {\"match_all\": {}}}")
                 .build()
         };
+        final RoleDescriptor.ApplicationResourcePrivileges[] applicationPrivileges = {
+            RoleDescriptor.ApplicationResourcePrivileges.builder()
+                .application("my_app")
+                .privileges("read", "write")
+                .resources("*")
+                .build()
+        };
+        final ConditionalClusterPrivilege[] conditionalClusterPrivileges = {
+            new ConditionalClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02")))
+        };
+
         Map<String, Object> metadata = randomBoolean() ? MetadataUtils.DEFAULT_RESERVED_METADATA : null;
-        final RoleDescriptor descriptor =
-            new RoleDescriptor("test", new String[] { "all", "none" }, groups, new String[] { "sudo" }, metadata);
+        final RoleDescriptor descriptor = new RoleDescriptor("test", new String[]{"all", "none"}, groups, applicationPrivileges,
+            conditionalClusterPrivileges, new String[] { "sudo" }, metadata, null);
         RoleDescriptor.writeTo(descriptor, output);
-        StreamInput streamInput = ByteBufferStreamInput.wrap(BytesReference.toBytes(output.bytes()));
+        final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin(Settings.EMPTY).getNamedWriteables());
+        StreamInput streamInput = new NamedWriteableAwareStreamInput(ByteBufferStreamInput.wrap(BytesReference.toBytes(output.bytes())),
+            registry);
         final RoleDescriptor serialized = RoleDescriptor.readFrom(streamInput);
         assertEquals(descriptor, serialized);
     }
org.elasticsearch.xpack.core.security.action.saml.SamlAuthenticateAction; +import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.core.security.authz.permission.Role; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Map; @@ -42,6 +54,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; import java.util.function.Function; +import java.util.function.Predicate; import static org.elasticsearch.mock.orig.Mockito.times; import static org.elasticsearch.mock.orig.Mockito.verifyNoMoreInteractions; @@ -103,7 +116,8 @@ public void testRolesWhenDlsFlsUnlicensed() throws IOException { when(fileRolesStore.roleDescriptors(Collections.singleton("fls_dls"))).thenReturn(Collections.singleton(flsDlsRole)); when(fileRolesStore.roleDescriptors(Collections.singleton("no_fls_dls"))).thenReturn(Collections.singleton(noFlsDlsRole)); CompositeRolesStore compositeRolesStore = new CompositeRolesStore(Settings.EMPTY, fileRolesStore, mock(NativeRolesStore.class), - mock(ReservedRolesStore.class), Collections.emptyList(), new ThreadContext(Settings.EMPTY), licenseState); + mock(ReservedRolesStore.class), mock(NativePrivilegeStore.class), Collections.emptyList(), + new ThreadContext(Settings.EMPTY), licenseState); FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); PlainActionFuture roleFuture = new PlainActionFuture<>(); @@ -163,7 +177,8 @@ public void testRolesWhenDlsFlsLicensed() throws IOException { when(fileRolesStore.roleDescriptors(Collections.singleton("fls_dls"))).thenReturn(Collections.singleton(flsDlsRole)); when(fileRolesStore.roleDescriptors(Collections.singleton("no_fls_dls"))).thenReturn(Collections.singleton(noFlsDlsRole)); CompositeRolesStore compositeRolesStore = new CompositeRolesStore(Settings.EMPTY, fileRolesStore, mock(NativeRolesStore.class), - mock(ReservedRolesStore.class), Collections.emptyList(), new ThreadContext(Settings.EMPTY), licenseState); + mock(ReservedRolesStore.class), mock(NativePrivilegeStore.class), Collections.emptyList(), + new ThreadContext(Settings.EMPTY), licenseState); FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); PlainActionFuture roleFuture = new PlainActionFuture<>(); @@ -196,7 +211,7 @@ public void testNegativeLookupsAreCached() { final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore, - Collections.emptyList(), new 
ThreadContext(SECURITY_ENABLED_SETTINGS), + mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS), new XPackLicenseState(SECURITY_ENABLED_SETTINGS)); verify(fileRolesStore).addListener(any(Runnable.class)); // adds a listener in ctor @@ -274,8 +289,8 @@ public void testCustomRolesProviders() { final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore, - Arrays.asList(inMemoryProvider1, inMemoryProvider2), new ThreadContext(SECURITY_ENABLED_SETTINGS), - new XPackLicenseState(SECURITY_ENABLED_SETTINGS)); + mock(NativePrivilegeStore.class), Arrays.asList(inMemoryProvider1, inMemoryProvider2), + new ThreadContext(SECURITY_ENABLED_SETTINGS), new XPackLicenseState(SECURITY_ENABLED_SETTINGS)); final Set roleNames = Sets.newHashSet("roleA", "roleB", "unknown"); PlainActionFuture future = new PlainActionFuture<>(); @@ -328,7 +343,9 @@ public void testMergingRolesWithFls() { .build() }, null); FieldPermissionsCache cache = new FieldPermissionsCache(Settings.EMPTY); - Role role = CompositeRolesStore.buildRoleFromDescriptors(Sets.newHashSet(flsRole, addsL1Fields), cache); + PlainActionFuture future = new PlainActionFuture<>(); + CompositeRolesStore.buildRoleFromDescriptors(Sets.newHashSet(flsRole, addsL1Fields), cache, null, future); + Role role = future.actionGet(); MetaData metaData = MetaData.builder() .put(new IndexMetaData.Builder("test") @@ -343,6 +360,111 @@ public void testMergingRolesWithFls() { assertTrue(acls.get("test").getFieldPermissions().grantsAccessTo("L3.foo")); } + public void testMergingBasicRoles() { + final TransportRequest request1 = mock(TransportRequest.class); + final TransportRequest request2 = mock(TransportRequest.class); + final TransportRequest request3 = mock(TransportRequest.class); + + ConditionalClusterPrivilege ccp1 = mock(ConditionalClusterPrivilege.class); + when(ccp1.getPrivilege()).thenReturn(ClusterPrivilege.MANAGE_SECURITY); + when(ccp1.getRequestPredicate()).thenReturn(req -> req == request1); + RoleDescriptor role1 = new RoleDescriptor("r1", new String[]{"monitor"}, new IndicesPrivileges[]{ + IndicesPrivileges.builder() + .indices("abc-*", "xyz-*") + .privileges("read") + .build(), + IndicesPrivileges.builder() + .indices("ind-1-*") + .privileges("all") + .build(), + }, new RoleDescriptor.ApplicationResourcePrivileges[]{ + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("app1") + .resources("user/*") + .privileges("read", "write") + .build(), + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("app1") + .resources("settings/*") + .privileges("read") + .build() + }, new ConditionalClusterPrivilege[] { ccp1 }, + new String[]{"app-user-1"}, null, null); + + ConditionalClusterPrivilege ccp2 = mock(ConditionalClusterPrivilege.class); + when(ccp2.getPrivilege()).thenReturn(ClusterPrivilege.MANAGE_SECURITY); + when(ccp2.getRequestPredicate()).thenReturn(req -> req == request2); + RoleDescriptor role2 = new RoleDescriptor("r2", new String[]{"manage_saml"}, new IndicesPrivileges[]{ + IndicesPrivileges.builder() + .indices("abc-*", "ind-2-*") + .privileges("all") + .build() + }, new RoleDescriptor.ApplicationResourcePrivileges[]{ + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("app2a") + .resources("*") + .privileges("all") + .build(), + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("app2b") + .resources("*") + .privileges("read") + 
.build() + }, new ConditionalClusterPrivilege[] { ccp2 }, + new String[]{"app-user-2"}, null, null); + + FieldPermissionsCache cache = new FieldPermissionsCache(Settings.EMPTY); + PlainActionFuture future = new PlainActionFuture<>(); + final NativePrivilegeStore privilegeStore = mock(NativePrivilegeStore.class); + doAnswer(inv -> { + assertTrue(inv.getArguments().length == 3); + ActionListener> listener + = (ActionListener>) inv.getArguments()[2]; + Set set = new HashSet<>(); + Arrays.asList("app1", "app2a", "app2b").forEach( + app -> Arrays.asList("read", "write", "all").forEach( + perm -> set.add( + new ApplicationPrivilegeDescriptor(app, perm, Collections.emptySet(), Collections.emptyMap()) + ))); + listener.onResponse(set); + return null; + }).when(privilegeStore).getPrivileges(any(Collection.class), any(Collection.class), any(ActionListener.class)); + CompositeRolesStore.buildRoleFromDescriptors(Sets.newHashSet(role1, role2), cache, privilegeStore, future); + Role role = future.actionGet(); + + assertThat(role.cluster().check(ClusterStateAction.NAME, randomFrom(request1, request2, request3)), equalTo(true)); + assertThat(role.cluster().check(SamlAuthenticateAction.NAME, randomFrom(request1, request2, request3)), equalTo(true)); + assertThat(role.cluster().check(ClusterUpdateSettingsAction.NAME, randomFrom(request1, request2, request3)), equalTo(false)); + + assertThat(role.cluster().check(PutUserAction.NAME, randomFrom(request1, request2)), equalTo(true)); + assertThat(role.cluster().check(PutUserAction.NAME, request3), equalTo(false)); + + final Predicate allowedRead = role.indices().allowedIndicesMatcher(GetAction.NAME); + assertThat(allowedRead.test("abc-123"), equalTo(true)); + assertThat(allowedRead.test("xyz-000"), equalTo(true)); + assertThat(allowedRead.test("ind-1-a"), equalTo(true)); + assertThat(allowedRead.test("ind-2-a"), equalTo(true)); + assertThat(allowedRead.test("foo"), equalTo(false)); + assertThat(allowedRead.test("abc"), equalTo(false)); + assertThat(allowedRead.test("xyz"), equalTo(false)); + assertThat(allowedRead.test("ind-3-a"), equalTo(false)); + + final Predicate allowedWrite = role.indices().allowedIndicesMatcher(IndexAction.NAME); + assertThat(allowedWrite.test("abc-123"), equalTo(true)); + assertThat(allowedWrite.test("xyz-000"), equalTo(false)); + assertThat(allowedWrite.test("ind-1-a"), equalTo(true)); + assertThat(allowedWrite.test("ind-2-a"), equalTo(true)); + assertThat(allowedWrite.test("foo"), equalTo(false)); + assertThat(allowedWrite.test("abc"), equalTo(false)); + assertThat(allowedWrite.test("xyz"), equalTo(false)); + assertThat(allowedWrite.test("ind-3-a"), equalTo(false)); + + role.application().grants(new ApplicationPrivilege("app1", "app1-read", "write"), "user/joe"); + role.application().grants(new ApplicationPrivilege("app1", "app1-read", "read"), "settings/hostname"); + role.application().grants(new ApplicationPrivilege("app2a", "app2a-all", "all"), "user/joe"); + role.application().grants(new ApplicationPrivilege("app2b", "app2b-read", "read"), "settings/hostname"); + } + public void testCustomRolesProviderFailures() throws Exception { final FileRolesStore fileRolesStore = mock(FileRolesStore.class); when(fileRolesStore.roleDescriptors(anySetOf(String.class))).thenReturn(Collections.emptySet()); @@ -370,8 +492,8 @@ public void testCustomRolesProviderFailures() throws Exception { final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore, - 
Arrays.asList(inMemoryProvider1, failingProvider), new ThreadContext(SECURITY_ENABLED_SETTINGS), - new XPackLicenseState(SECURITY_ENABLED_SETTINGS)); + mock(NativePrivilegeStore.class), Arrays.asList(inMemoryProvider1, failingProvider), + new ThreadContext(SECURITY_ENABLED_SETTINGS), new XPackLicenseState(SECURITY_ENABLED_SETTINGS)); final Set roleNames = Sets.newHashSet("roleA", "roleB", "unknown"); PlainActionFuture future = new PlainActionFuture<>(); @@ -411,7 +533,7 @@ public void testCustomRolesProvidersLicensing() { // these licenses don't allow custom role providers xPackLicenseState.update(randomFrom(OperationMode.BASIC, OperationMode.GOLD, OperationMode.STANDARD), true, null); CompositeRolesStore compositeRolesStore = new CompositeRolesStore( - Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, + Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState); Set roleNames = Sets.newHashSet("roleA"); @@ -424,7 +546,7 @@ public void testCustomRolesProvidersLicensing() { assertEquals(0, role.indices().groups().length); compositeRolesStore = new CompositeRolesStore( - Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, + Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState); // these licenses allow custom role providers xPackLicenseState.update(randomFrom(OperationMode.PLATINUM, OperationMode.TRIAL), true, null); @@ -439,7 +561,7 @@ public void testCustomRolesProvidersLicensing() { // license expired, don't allow custom role providers compositeRolesStore = new CompositeRolesStore( - Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, + Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState); xPackLicenseState.update(randomFrom(OperationMode.PLATINUM, OperationMode.TRIAL), false, null); roleNames = Sets.newHashSet("roleA"); @@ -459,7 +581,8 @@ public void testCacheClearOnIndexHealthChange() { CompositeRolesStore compositeRolesStore = new CompositeRolesStore( Settings.EMPTY, mock(FileRolesStore.class), mock(NativeRolesStore.class), mock(ReservedRolesStore.class), - Collections.emptyList(), new ThreadContext(Settings.EMPTY), new XPackLicenseState(SECURITY_ENABLED_SETTINGS)) { + mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(Settings.EMPTY), + new XPackLicenseState(SECURITY_ENABLED_SETTINGS)) { @Override public void invalidateAll() { numInvalidation.incrementAndGet(); @@ -502,9 +625,10 @@ public void invalidateAll() { public void testCacheClearOnIndexOutOfDateChange() { final AtomicInteger numInvalidation = new AtomicInteger(0); - CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, mock(FileRolesStore.class), - mock(NativeRolesStore.class), mock(ReservedRolesStore.class), - Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS), new XPackLicenseState(SECURITY_ENABLED_SETTINGS)) { + CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, + mock(FileRolesStore.class), mock(NativeRolesStore.class), mock(ReservedRolesStore.class), + mock(NativePrivilegeStore.class), Collections.emptyList(), new 
ThreadContext(SECURITY_ENABLED_SETTINGS), + new XPackLicenseState(SECURITY_ENABLED_SETTINGS)) { @Override public void invalidateAll() { numInvalidation.incrementAndGet(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 14be1e260db36..1e2428e77791b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.audit.logfile.CapturingLogger; @@ -123,7 +124,7 @@ public void testParseFile() throws Exception { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role3" })); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.NONE)); + assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE)); assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), notNullValue()); assertThat(role.indices().groups().length, is(1)); @@ -147,7 +148,7 @@ public void testParseFile() throws Exception { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role_run_as" })); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.NONE)); + assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE)); assertThat(role.indices(), is(IndicesPermission.NONE)); assertThat(role.runAs(), notNullValue()); assertThat(role.runAs().check("user1"), is(true)); @@ -160,7 +161,7 @@ public void testParseFile() throws Exception { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role_run_as1" })); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.NONE)); + assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE)); assertThat(role.indices(), is(IndicesPermission.NONE)); assertThat(role.runAs(), notNullValue()); assertThat(role.runAs().check("user1"), is(true)); @@ -173,7 +174,7 @@ public void testParseFile() throws Exception { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role_fields" })); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.NONE)); + assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE)); assertThat(role.runAs(), is(RunAsPermission.NONE)); assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), notNullValue()); @@ -195,7 +196,7 @@ public void testParseFile() throws Exception { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role_query" })); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.NONE)); + assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE)); assertThat(role.runAs(), is(RunAsPermission.NONE)); assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), 
notNullValue()); @@ -216,7 +217,7 @@ public void testParseFile() throws Exception { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role_query_fields" })); assertThat(role.cluster(), notNullValue()); - assertThat(role.cluster(), is(ClusterPermission.NONE)); + assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE)); assertThat(role.runAs(), is(RunAsPermission.NONE)); assertThat(role.indices(), notNullValue()); assertThat(role.indices().groups(), notNullValue()); @@ -341,14 +342,15 @@ public void testAutoReload() throws Exception { fail("Waited too long for the updated file to be picked up"); } + final TransportRequest request = mock(TransportRequest.class); descriptors = store.roleDescriptors(Collections.singleton("role5")); assertThat(descriptors, notNullValue()); assertEquals(1, descriptors.size()); Role role = Role.builder(descriptors.iterator().next(), null).build(); assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "role5" })); - assertThat(role.cluster().check("cluster:monitor/foo/bar"), is(true)); - assertThat(role.cluster().check("cluster:admin/foo/bar"), is(false)); + assertThat(role.cluster().check("cluster:monitor/foo/bar", request), is(true)); + assertThat(role.cluster().check("cluster:admin/foo/bar", request), is(false)); } finally { if (watcherService != null) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java new file mode 100644 index 0000000000000..89058cf4a8bb9 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java @@ -0,0 +1,300 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authz.store; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchResponseSections; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.client.NoOpClient; +import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheRequest; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; +import org.hamcrest.Matchers; +import org.junit.After; +import org.junit.Before; +import org.mockito.Mockito; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +import static java.util.Collections.emptyMap; +import static org.elasticsearch.common.util.set.Sets.newHashSet; +import static org.hamcrest.Matchers.arrayContaining; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.everyItem; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.iterableWithSize; +import static org.hamcrest.Matchers.not; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +@TestLogging("org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore:TRACE") +public class NativePrivilegeStoreTests extends ESTestCase { + + private NativePrivilegeStore store; + private List requests; + private AtomicReference listener; + private Client client; + + @Before + public void setup() { + requests = new ArrayList<>(); + listener = new AtomicReference<>(); + client = new NoOpClient(getTestName()) { + @Override + protected + void doExecute(Action action, Request request, ActionListener listener) { + NativePrivilegeStoreTests.this.requests.add(request); + 
NativePrivilegeStoreTests.this.listener.set(listener); + } + }; + final SecurityIndexManager securityIndex = mock(SecurityIndexManager.class); + when(securityIndex.isAvailable()).thenReturn(true); + Mockito.doAnswer(invocationOnMock -> { + assertThat(invocationOnMock.getArguments().length, equalTo(2)); + assertThat(invocationOnMock.getArguments()[1], instanceOf(Runnable.class)); + ((Runnable) invocationOnMock.getArguments()[1]).run(); + return null; + }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class)); + store = new NativePrivilegeStore(Settings.EMPTY, client, securityIndex); + } + + @After + public void cleanup() { + client.close(); + } + + public void testGetSinglePrivilegeByName() throws Exception { + final ApplicationPrivilegeDescriptor sourcePrivilege = new ApplicationPrivilegeDescriptor("myapp", "admin", + newHashSet("action:admin/*", "action:login", "data:read/*"), emptyMap() + ); + + final PlainActionFuture future = new PlainActionFuture<>(); + store.getPrivilege("myapp", "admin", future); + assertThat(requests, iterableWithSize(1)); + assertThat(requests.get(0), instanceOf(GetRequest.class)); + GetRequest request = (GetRequest) requests.get(0); + assertThat(request.index(), equalTo(SecurityIndexManager.SECURITY_INDEX_NAME)); + assertThat(request.type(), equalTo("doc")); + assertThat(request.id(), equalTo("application-privilege_myapp:admin")); + + final String docSource = Strings.toString(sourcePrivilege); + listener.get().onResponse(new GetResponse( + new GetResult(request.index(), request.type(), request.id(), 1L, true, new BytesArray(docSource), emptyMap()) + )); + final ApplicationPrivilegeDescriptor getPrivilege = future.get(1, TimeUnit.SECONDS); + assertThat(getPrivilege, equalTo(sourcePrivilege)); + } + + public void testGetMissingPrivilege() throws Exception { + final PlainActionFuture future = new PlainActionFuture<>(); + store.getPrivilege("myapp", "admin", future); + assertThat(requests, iterableWithSize(1)); + assertThat(requests.get(0), instanceOf(GetRequest.class)); + GetRequest request = (GetRequest) requests.get(0); + assertThat(request.index(), equalTo(SecurityIndexManager.SECURITY_INDEX_NAME)); + assertThat(request.type(), equalTo("doc")); + assertThat(request.id(), equalTo("application-privilege_myapp:admin")); + + listener.get().onResponse(new GetResponse( + new GetResult(request.index(), request.type(), request.id(), -1, false, null, emptyMap()) + )); + final ApplicationPrivilegeDescriptor getPrivilege = future.get(1, TimeUnit.SECONDS); + assertThat(getPrivilege, Matchers.nullValue()); + } + + public void testGetPrivilegesByApplicationName() throws Exception { + final List sourcePrivileges = Arrays.asList( + new ApplicationPrivilegeDescriptor("myapp", "admin", newHashSet("action:admin/*", "action:login", "data:read/*"), emptyMap()), + new ApplicationPrivilegeDescriptor("myapp", "user", newHashSet("action:login", "data:read/*"), emptyMap()), + new ApplicationPrivilegeDescriptor("myapp", "author", newHashSet("action:login", "data:read/*", "data:write/*"), emptyMap()) + ); + + final PlainActionFuture> future = new PlainActionFuture<>(); + store.getPrivileges(Arrays.asList("myapp", "yourapp"), null, future); + assertThat(requests, iterableWithSize(1)); + assertThat(requests.get(0), instanceOf(SearchRequest.class)); + SearchRequest request = (SearchRequest) requests.get(0); + assertThat(request.indices(), arrayContaining(SecurityIndexManager.SECURITY_INDEX_NAME)); + + final String query = 
Strings.toString(request.source().query()); + assertThat(query, containsString("{\"terms\":{\"application\":[\"myapp\",\"yourapp\"]")); + assertThat(query, containsString("{\"term\":{\"type\":{\"value\":\"application-privilege\"")); + + final SearchHit[] hits = buildHits(sourcePrivileges); + listener.get().onResponse(new SearchResponse(new SearchResponseSections( + new SearchHits(hits, hits.length, 0f), null, null, false, false, null, 1), "_scrollId1", 1, 1, 0, 1, null, null)); + + assertResult(sourcePrivileges, future); + } + + public void testGetAllPrivileges() throws Exception { + final List sourcePrivileges = Arrays.asList( + new ApplicationPrivilegeDescriptor("app1", "admin", newHashSet("action:admin/*", "action:login", "data:read/*"), emptyMap()), + new ApplicationPrivilegeDescriptor("app2", "user", newHashSet("action:login", "data:read/*"), emptyMap()), + new ApplicationPrivilegeDescriptor("app3", "all", newHashSet("*"), emptyMap()) + ); + + final PlainActionFuture> future = new PlainActionFuture<>(); + store.getPrivileges(null, null, future); + assertThat(requests, iterableWithSize(1)); + assertThat(requests.get(0), instanceOf(SearchRequest.class)); + SearchRequest request = (SearchRequest) requests.get(0); + assertThat(request.indices(), arrayContaining(SecurityIndexManager.SECURITY_INDEX_NAME)); + + final String query = Strings.toString(request.source().query()); + assertThat(query, containsString("{\"term\":{\"type\":{\"value\":\"application-privilege\"")); + assertThat(query, not(containsString("{\"terms\""))); + + final SearchHit[] hits = buildHits(sourcePrivileges); + listener.get().onResponse(new SearchResponse(new SearchResponseSections( + new SearchHits(hits, hits.length, 0f), null, null, false, false, null, 1), "_scrollId1", 1, 1, 0, 1, null, null)); + + assertResult(sourcePrivileges, future); + } + + public void testPutPrivileges() throws Exception { + final List putPrivileges = Arrays.asList( + new ApplicationPrivilegeDescriptor("app1", "admin", newHashSet("action:admin/*", "action:login", "data:read/*"), emptyMap()), + new ApplicationPrivilegeDescriptor("app1", "user", newHashSet("action:login", "data:read/*"), emptyMap()), + new ApplicationPrivilegeDescriptor("app2", "all", newHashSet("*"), emptyMap()) + ); + + final PlainActionFuture>> future = new PlainActionFuture<>(); + store.putPrivileges(putPrivileges, WriteRequest.RefreshPolicy.IMMEDIATE, future); + assertThat(requests, iterableWithSize(putPrivileges.size())); + assertThat(requests, everyItem(instanceOf(IndexRequest.class))); + + final List indexRequests = new ArrayList<>(requests.size()); + requests.stream().map(IndexRequest.class::cast).forEach(indexRequests::add); + requests.clear(); + + final ActionListener indexListener = listener.get(); + final String uuid = UUIDs.randomBase64UUID(random()); + for (int i = 0; i < putPrivileges.size(); i++) { + ApplicationPrivilegeDescriptor privilege = putPrivileges.get(i); + IndexRequest request = indexRequests.get(i); + assertThat(request.indices(), arrayContaining(SecurityIndexManager.SECURITY_INDEX_NAME)); + assertThat(request.type(), equalTo("doc")); + assertThat(request.id(), equalTo( + "application-privilege_" + privilege.getApplication() + ":" + privilege.getName() + )); + final XContentBuilder builder = privilege.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), true); + assertThat(request.source(), equalTo(BytesReference.bytes(builder))); + final boolean created = privilege.getName().equals("user") == false; + indexListener.onResponse(new 
IndexResponse( + new ShardId(SecurityIndexManager.SECURITY_INDEX_NAME, uuid, i), + request.type(), request.id(), 1, 1, 1, created + )); + } + + awaitBusy(() -> requests.size() > 0, 1, TimeUnit.SECONDS); + + assertThat(requests, iterableWithSize(1)); + assertThat(requests.get(0), instanceOf(ClearRolesCacheRequest.class)); + listener.get().onResponse(null); + + final Map> map = future.actionGet(); + assertThat(map.entrySet(), iterableWithSize(2)); + assertThat(map.get("app1"), iterableWithSize(1)); + assertThat(map.get("app2"), iterableWithSize(1)); + assertThat(map.get("app1"), contains("admin")); + assertThat(map.get("app2"), contains("all")); + } + + public void testDeletePrivileges() throws Exception { + final List privilegeNames = Arrays.asList("p1", "p2", "p3"); + + final PlainActionFuture>> future = new PlainActionFuture<>(); + store.deletePrivileges("app1", privilegeNames, WriteRequest.RefreshPolicy.IMMEDIATE, future); + assertThat(requests, iterableWithSize(privilegeNames.size())); + assertThat(requests, everyItem(instanceOf(DeleteRequest.class))); + + final List deletes = new ArrayList<>(requests.size()); + requests.stream().map(DeleteRequest.class::cast).forEach(deletes::add); + requests.clear(); + + final ActionListener deleteListener = listener.get(); + final String uuid = UUIDs.randomBase64UUID(random()); + for (int i = 0; i < privilegeNames.size(); i++) { + String name = privilegeNames.get(i); + DeleteRequest request = deletes.get(i); + assertThat(request.indices(), arrayContaining(SecurityIndexManager.SECURITY_INDEX_NAME)); + assertThat(request.type(), equalTo("doc")); + assertThat(request.id(), equalTo("application-privilege_app1:" + name)); + final boolean found = name.equals("p2") == false; + deleteListener.onResponse(new DeleteResponse( + new ShardId(SecurityIndexManager.SECURITY_INDEX_NAME, uuid, i), + request.type(), request.id(), 1, 1, 1, found + )); + } + + awaitBusy(() -> requests.size() > 0, 1, TimeUnit.SECONDS); + + assertThat(requests, iterableWithSize(1)); + assertThat(requests.get(0), instanceOf(ClearRolesCacheRequest.class)); + listener.get().onResponse(null); + + final Map> map = future.actionGet(); + assertThat(map.entrySet(), iterableWithSize(1)); + assertThat(map.get("app1"), iterableWithSize(2)); + assertThat(map.get("app1"), containsInAnyOrder("p1", "p3")); + } + + private SearchHit[] buildHits(List sourcePrivileges) { + final SearchHit[] hits = new SearchHit[sourcePrivileges.size()]; + for (int i = 0; i < hits.length; i++) { + final ApplicationPrivilegeDescriptor p = sourcePrivileges.get(i); + hits[i] = new SearchHit(i, "application-privilege_" + p.getApplication() + ":" + p.getName(), null, null); + hits[i].sourceRef(new BytesArray(Strings.toString(p))); + } + return hits; + } + + private void assertResult(List sourcePrivileges, + PlainActionFuture> future) throws Exception { + final Collection getPrivileges = future.get(1, TimeUnit.SECONDS); + assertThat(getPrivileges, iterableWithSize(sourcePrivileges.size())); + assertThat(new HashSet<>(getPrivileges), equalTo(new HashSet<>(sourcePrivileges))); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java index c66ecbec2b3ee..67bfc2ecdcb0d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java @@ -58,7 +58,6 @@ public void testAuthenticateApi() throws Exception { assertThat(response.getStatusLine().getStatusCode(), is(200)); ObjectPath objectPath = ObjectPath.createFromResponse(response); assertThat(objectPath.evaluate("username").toString(), equalTo(SecuritySettingsSource.TEST_USER_NAME)); - @SuppressWarnings("unchecked") List roles = objectPath.evaluate("roles"); assertThat(roles.size(), is(1)); assertThat(roles, contains(SecuritySettingsSource.TEST_ROLE)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/HasPrivilegesRestResponseTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/HasPrivilegesRestResponseTests.java index 601cabf4f846a..645abbc8f1a6b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/HasPrivilegesRestResponseTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/HasPrivilegesRestResponseTests.java @@ -5,10 +5,6 @@ */ package org.elasticsearch.xpack.security.rest.action.user; -import java.util.Arrays; -import java.util.Collections; -import java.util.LinkedHashMap; - import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; @@ -19,6 +15,10 @@ import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; import org.elasticsearch.xpack.security.rest.action.user.RestHasPrivilegesAction.HasPrivilegesRestResponseBuilder; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; + import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.Mockito.mock; @@ -30,13 +30,13 @@ public void testBuildValidJsonResponse() throws Exception { final HasPrivilegesResponse actionResponse = new HasPrivilegesResponse(false, Collections.singletonMap("manage", true), Arrays.asList( - new HasPrivilegesResponse.IndexPrivileges("staff", + new HasPrivilegesResponse.ResourcePrivileges("staff", MapBuilder.newMapBuilder(new LinkedHashMap<>()) .put("read", true).put("index", true).put("delete", false).put("manage", false).map()), - new HasPrivilegesResponse.IndexPrivileges("customers", + new HasPrivilegesResponse.ResourcePrivileges("customers", MapBuilder.newMapBuilder(new LinkedHashMap<>()) .put("read", true).put("index", true).put("delete", true).put("manage", false).map()) - )); + ), Collections.emptyMap()); final XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); final RestResponse rest = response.buildResponse(actionResponse, builder); @@ -50,6 +50,8 @@ public void testBuildValidJsonResponse() throws Exception { "\"index\":{" + "\"staff\":{\"read\":true,\"index\":true,\"delete\":false,\"manage\":false}," + "\"customers\":{\"read\":true,\"index\":true,\"delete\":true,\"manage\":false}" + - "}}")); + "}," + + "\"application\":{}" + + "}")); } } \ No newline at end of file diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java index feca093e581af..70ab085fcf72b 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java @@ -35,9 +35,6 @@ import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; -import javax.net.SocketFactory; -import javax.net.ssl.HandshakeCompletedListener; -import javax.net.ssl.SSLSocket; import java.io.IOException; import java.net.InetAddress; import java.net.SocketTimeoutException; @@ -47,6 +44,10 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; +import javax.net.SocketFactory; +import javax.net.ssl.HandshakeCompletedListener; +import javax.net.ssl.SSLSocket; + import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.containsString; @@ -119,7 +120,6 @@ protected MockTransportService build(Settings settings, Version version, Cluster @Override protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { - @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java index 03f963cc59ca6..0835ecee9c2fd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java @@ -7,6 +7,7 @@ import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -92,6 +93,7 @@ protected boolean transportSSLEnabled() { } public void testThatSSLConfigurationReloadsOnModification() throws Exception { + assumeTrue("test fails on JDK 11 currently", JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0); Path keyPath = createTempDir().resolve("testnode_updated.pem"); Path certPath = createTempDir().resolve("testnode_updated.crt"); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem"), keyPath); diff --git a/x-pack/plugin/security/src/test/resources/kdc.ldiff b/x-pack/plugin/security/src/test/resources/kdc.ldiff new file mode 100644 index 0000000000000..e213048d6f578 --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/kdc.ldiff @@ -0,0 +1,23 @@ +dn: dc=example,dc=com +objectClass: top +objectClass: domain +dc: example + +dn: ou=system,dc=example,dc=com +objectClass: organizationalUnit +objectClass: top +ou: system + +dn: ou=users,dc=example,dc=com +objectClass: organizationalUnit +objectClass: top +ou: users + +dn: uid=admin,ou=system,dc=example,dc=com +objectClass: top +objectClass: person +objectClass: inetOrgPerson +cn: Admin +sn: Admin +uid: admin +userPassword: secret \ No newline at end of file diff --git a/x-pack/plugin/sql/build.gradle b/x-pack/plugin/sql/build.gradle index 436837e85b4f0..039e78c14952c 100644 --- a/x-pack/plugin/sql/build.gradle +++ 
b/x-pack/plugin/sql/build.gradle @@ -19,7 +19,7 @@ archivesBaseName = 'x-pack-sql' integTest.enabled = false dependencies { - compileOnly "org.elasticsearch.plugin:x-pack-core:${version}" + compileOnly project(path: xpackModule('core'), configuration: 'shadow') compileOnly(project(':modules:lang-painless')) { // exclude ASM to not affect featureAware task on Java 10+ exclude group: "org.ow2.asm" diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index f1d9eb1fb3f24..f1495f4f3acc3 100644 --- a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.test.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.apache.http.HttpStatus; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.client.Response; @@ -219,7 +220,6 @@ private void disableMonitoring() throws Exception { @SuppressWarnings("unchecked") final Map node = (Map) nodes.values().iterator().next(); - @SuppressWarnings("unchecked") final Number activeWrites = (Number) extractValue("thread_pool.write.active", node); return activeWrites != null && activeWrites.longValue() == 0L; } catch (Exception e) { diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_privileges.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_privileges.json new file mode 100644 index 0000000000000..6086e46eade65 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_privileges.json @@ -0,0 +1,30 @@ +{ + "xpack.security.delete_privileges": { + "documentation": "TODO", + "methods": [ "DELETE" ], + "url": { + "path": "/_xpack/security/privilege/{application}/{name}", + "paths": [ "/_xpack/security/privilege/{application}/{name}" ], + "parts": { + "application": { + "type" : "string", + "description" : "Application name", + "required" : true + }, + "name": { + "type" : "string", + "description" : "Privilege name", + "required" : true + } + }, + "params": { + "refresh": { + "type" : "enum", + "options": ["true", "false", "wait_for"], + "description" : "If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes." 
+        }
+      }
+    },
+    "body": null
+  }
+}
diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_privileges.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_privileges.json
new file mode 100644
index 0000000000000..4286ffa954b99
--- /dev/null
+++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_privileges.json
@@ -0,0 +1,24 @@
+{
+  "xpack.security.get_privileges": {
+    "documentation": "TODO",
+    "methods": [ "GET" ],
+    "url": {
+      "path": "/_xpack/security/privilege/{application}/{name}",
+      "paths": [ "/_xpack/security/privilege/{application}/{name}" ],
+      "parts": {
+        "application": {
+          "type" : "string",
+          "description" : "Application name",
+          "required" : false
+        },
+        "name": {
+          "type" : "string",
+          "description" : "Privilege name",
+          "required" : false
+        }
+      },
+      "params": {}
+    },
+    "body": null
+  }
+}
diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.has_privileges.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.has_privileges.json
new file mode 100644
index 0000000000000..64b15ae9c0222
--- /dev/null
+++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.has_privileges.json
@@ -0,0 +1,22 @@
+{
+  "xpack.security.has_privileges": {
+    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-privileges.html",
+    "methods": [ "GET", "POST" ],
+    "url": {
+      "path": "/_xpack/security/user/_has_privileges",
+      "paths": [ "/_xpack/security/user/_has_privileges", "/_xpack/security/user/{user}/_has_privileges" ],
+      "parts": {
+        "user": {
+          "type" : "string",
+          "description" : "Username",
+          "required" : false
+        }
+      },
+      "params": {}
+    },
+    "body": {
+      "description" : "The privileges to test",
+      "required" : true
+    }
+  }
+}
diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_privilege.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_privilege.json
new file mode 100644
index 0000000000000..3d453682c6431
--- /dev/null
+++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_privilege.json
@@ -0,0 +1,33 @@
+{
+  "xpack.security.put_privilege": {
+    "documentation": "TODO",
+    "methods": [ "POST", "PUT" ],
+    "url": {
+      "path": "/_xpack/security/privilege/{application}/{name}",
+      "paths": [ "/_xpack/security/privilege/{application}/{name}" ],
+      "parts": {
+        "application": {
+          "type" : "string",
+          "description" : "Application name",
+          "required" : true
+        },
+        "name": {
+          "type" : "string",
+          "description" : "Privilege name",
+          "required" : true
+        }
+      },
+      "params": {
+        "refresh": {
+          "type" : "enum",
+          "options": ["true", "false", "wait_for"],
+          "description" : "If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes."
+ } + } + }, + "body": { + "description" : "The privilege to add", + "required" : true + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_privileges.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_privileges.json new file mode 100644 index 0000000000000..07eb541715810 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_privileges.json @@ -0,0 +1,27 @@ +{ + "xpack.security.put_privileges": { + "documentation": "TODO", + "methods": [ "POST" ], + "url": { + "path": "/_xpack/security/privilege/", + "paths": [ + "/_xpack/security/privilege/" + ], + "params": { + "refresh": { + "type": "enum", + "options": [ + "true", + "false", + "wait_for" + ], + "description": "If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes." + } + } + }, + "body": { + "description" : "The privilege(s) to add", + "required" : true + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/10_basic.yml new file mode 100644 index 0000000000000..e8dddf2153576 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/10_basic.yml @@ -0,0 +1,324 @@ +--- +setup: + - skip: + features: headers + + - do: + cluster.health: + wait_for_status: yellow +--- +teardown: + - do: + xpack.security.delete_privileges: + application: app + name: "p1,p2,p3" + ignore: 404 + - do: + xpack.security.delete_privileges: + application: app2 + name: "p1" + ignore: 404 + - do: + xpack.security.delete_privileges: + application: app3 + name: "p1,p2,p3,p4" + ignore: 404 + - do: + xpack.security.delete_privileges: + application: app4 + name: "p1" + ignore: 404 +--- +"Test put and get privileges": + # Single privilege, with names in URL + - do: + xpack.security.put_privilege: + application: app + name: p1 + body: > + { + "application": "app", + "name": "p1", + "actions": [ "data:read/*" , "action:login" ], + "metadata": { + "key1" : "val1a", + "key2" : "val2a" + } + } + - match: { "app.p1" : { created: true } } + + # Multiple privileges, no names in URL + - do: + xpack.security.put_privileges: + body: > + { + "app": { + "p2": { + "application": "app", + "name": "p2", + "actions": [ "data:read/*" , "action:login" ], + "metadata": { + "key1" : "val1b", + "key2" : "val2b" + } + }, + "p3": { + "application": "app", + "name": "p3", + "actions": [ "data:write/*" , "action:login" ], + "metadata": { + "key1" : "val1c", + "key2" : "val2c" + } + } + }, + "app2" : { + "p1" : { + "application": "app2", + "name": "p1", + "actions": [ "*" ] + } + } + } + - match: { "app.p2" : { created: true } } + - match: { "app.p3" : { created: true } } + - match: { "app2.p1" : { created: true } } + + # Update existing privilege, with names in URL + - do: + xpack.security.put_privilege: + application: app + name: p1 + body: > + { + "application": "app", + "name": "p1", + "actions": [ "data:read/*" , "action:login" ], + "metadata": { + "key3" : "val3" + } + } + - match: { "app.p1" : { created: false } } + + # Get the privilege back + - do: + xpack.security.get_privileges: + application: app + name: p1 + + - match: { + "app.p1" : { + "application": "app", + "name": "p1", + "actions": [ "data:read/*" , "action:login" ], + "metadata": { + "key3" : "val3" + } + } + } + + # Get 2 
privileges back + - do: + xpack.security.get_privileges: + application: app + name: p1,p2 + + - match: { + "app.p1" : { + "application": "app", + "name": "p1", + "actions": [ "data:read/*" , "action:login" ], + "metadata": { + "key3" : "val3" + } + } + } + - match: { + "app.p2" : { + "application": "app", + "name": "p2", + "actions": [ "data:read/*" , "action:login" ], + "metadata": { + "key1" : "val1b", + "key2" : "val2b" + } + } + } + + # Get all (3) privileges back for "app" + - do: + xpack.security.get_privileges: + application: "app" + name: "" + + - match: { + "app.p1" : { + "application": "app", + "name": "p1", + "actions": [ "data:read/*" , "action:login" ], + "metadata": { + "key3" : "val3" + } + } + } + - match: { + "app.p2" : { + "application": "app", + "name": "p2", + "actions": [ "data:read/*" , "action:login" ], + "metadata": { + "key1" : "val1b", + "key2" : "val2b" + } + } + } + - match: { + "app.p3" : { + "application": "app", + "name": "p3", + "actions": [ "data:write/*" , "action:login" ], + "metadata": { + "key1" : "val1c", + "key2" : "val2c" + } + } + } + + # Get all (4) privileges back for all apps + - do: + xpack.security.get_privileges: + application: "" + name: "" + + - match: { + "app.p1" : { + "application": "app", + "name": "p1", + "actions": [ "data:read/*" , "action:login" ], + "metadata": { + "key3" : "val3" + } + } + } + - match: { + "app.p2" : { + "application": "app", + "name": "p2", + "actions": [ "data:read/*" , "action:login" ], + "metadata": { + "key1" : "val1b", + "key2" : "val2b" + } + } + } + - match: { + "app.p3" : { + "application": "app", + "name": "p3", + "actions": [ "data:write/*" , "action:login" ], + "metadata": { + "key1" : "val1c", + "key2" : "val2c" + } + } + } + - match: { + "app2.p1" : { + "application": "app2", + "name": "p1", + "actions": [ "*" ], + "metadata": { } + } + } + +--- +"Test put and delete privileges": + # Store some privileges + - do: + xpack.security.put_privileges: + body: > + { + "app3": { + "p1": { + "application": "app3", + "name": "p1", + "actions": [ "data:read/*" ] + }, + "p2": { + "application": "app3", + "name": "p2", + "actions": [ "data:write/*" ] + }, + "p3": { + "application": "app3", + "name": "p3", + "actions": [ "data:write/*", "data:read/*" ] + }, + "p4": { + "application": "app3", + "name": "p4", + "actions": [ "*" ] + } + }, + "app4": { + "p1": { + "application": "app4", + "name": "p1", + "actions": [ "*" ] + } + } + } + - match: { "app3.p1" : { created: true } } + - match: { "app3.p2" : { created: true } } + - match: { "app3.p3" : { created: true } } + - match: { "app3.p4" : { created: true } } + - match: { "app4.p1" : { created: true } } + + # Delete 1 privilege + - do: + xpack.security.delete_privileges: + application: app3 + name: p1 + + - match: { "app3.p1" : { "found" : true } } + + # Delete 2 more privileges (p2, p3) + # and try to delete two that don't exist (p1, p0) + - do: + xpack.security.delete_privileges: + application: app3 + name: p1,p2,p3,p0 + + - match: { "app3.p1" : { "found" : false} } + - match: { "app3.p2" : { "found" : true } } + - match: { "app3.p3" : { "found" : true } } + - match: { "app3.p0" : { "found" : false} } + + # Check the deleted privileges are gone + - do: + catch: missing + xpack.security.get_privileges: + application: app3 + name: p1,p2,p3 + + # Check the non-deleted privileges are there + - do: + xpack.security.get_privileges: + application: "" + name: "" + - match: { + "app3.p4" : { + "application": "app3", + "name": "p4", + "actions": [ "*" ], + "metadata": { } + 
} + } + - match: { + "app4.p1" : { + "application": "app4", + "name": "p1", + "actions": [ "*" ], + "metadata": { } + } + } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/20_has_application_privs.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/20_has_application_privs.yml new file mode 100644 index 0000000000000..1860564863fb2 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/20_has_application_privs.yml @@ -0,0 +1,190 @@ +--- +setup: + - skip: + features: headers + + - do: + cluster.health: + wait_for_status: yellow + + # Create some privileges + - do: + xpack.security.put_privileges: + body: > + { + "myapp": { + "user": { + "application": "myapp", + "name": "user", + "actions": [ "action:login", "version:1.0.*" ] + }, + "read": { + "application": "myapp", + "name": "read", + "actions": [ "data:read/*" ] + }, + "write": { + "application": "myapp", + "name": "write", + "actions": [ "data:write/*" ] + } + } + } + + # Store 2 test roles + - do: + xpack.security.put_role: + name: "myapp_engineering_read" + body: > + { + "cluster": [], + "indices": [ + { + "names": "engineering-*", + "privileges": ["read"] + } + ], + "applications": [ + { + "application": "myapp", + "privileges": ["user"], + "resources": ["*"] + }, + { + "application": "myapp", + "privileges": ["read"], + "resources": ["engineering/*"] + } + ] + } + + - do: + xpack.security.put_role: + name: "myapp_engineering_write" + body: > + { + "cluster": [], + "indices": [ + { + "names": "engineering-*", + "privileges": ["read"] + } + ], + "applications": [ + { + "application": "myapp", + "privileges": ["user"], + "resources": ["*"] + }, + { + "application": "myapp", + "privileges": ["read", "write"], + "resources": ["engineering/*"] + } + ] + } + + # And a user for each role + - do: + xpack.security.put_user: + username: "eng_read" + body: > + { + "password": "p@ssw0rd", + "roles" : [ "myapp_engineering_read" ] + } + - do: + xpack.security.put_user: + username: "eng_write" + body: > + { + "password": "p@ssw0rd", + "roles" : [ "myapp_engineering_write" ] + } + +--- +teardown: + - do: + xpack.security.delete_privileges: + application: myapp + name: "user,read,write" + ignore: 404 + + - do: + xpack.security.delete_user: + username: "eng_read" + ignore: 404 + + - do: + xpack.security.delete_user: + username: "eng_write" + ignore: 404 + + - do: + xpack.security.delete_role: + name: "myapp_engineering_read" + ignore: 404 + + - do: + xpack.security.delete_role: + name: "myapp_engineering_write" + ignore: 404 +--- +"Test has_privileges with application-privileges": + - do: + headers: { Authorization: "Basic ZW5nX3JlYWQ6cEBzc3cwcmQ=" } # eng_read + xpack.security.has_privileges: + user: null + body: > + { + "index": [ + { + "names" :[ "engineering-logs", "product-logs" ], + "privileges" : [ "read", "index", "write" ] + } + ], + "application": [ + { + "application" : "myapp", + "resources" : [ "*" ], + "privileges" : [ "action:login", "version:1.0.3" ] + }, + { + "application" : "myapp", + "resources" : [ "engineering/logs/*", "product/logs/*" ], + "privileges" : [ "data:read/log/raw", "data:write/log/raw" ] + } + ] + } + + - match: { "username" : "eng_read" } + - match: { "has_all_requested" : false } + - match: { "index" : { + "engineering-logs" : { + "read": true, + "index": false, + "write": false + }, + "product-logs" : { + "read": false, + "index": false, + "write": false + } + } } + - match: { "application" : { + "myapp" : { + "*" : { + "action:login" : 
true, + "version:1.0.3" : true + }, + "engineering/logs/*" : { + "data:read/log/raw" : true, + "data:write/log/raw" : false + }, + "product/logs/*" : { + "data:read/log/raw" : false, + "data:write/log/raw" : false + } + } + } } + diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/30_superuser.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/30_superuser.yml new file mode 100644 index 0000000000000..cbf08e94d597a --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/30_superuser.yml @@ -0,0 +1,131 @@ +--- +setup: + - skip: + features: headers + + - do: + cluster.health: + wait_for_status: yellow + + # Create some privileges + - do: + xpack.security.put_privileges: + body: > + { + "app01": { + "user": { + "application": "app01", + "name": "user", + "actions": [ "action:login" ] + }, + "read": { + "application": "app01", + "name": "read", + "actions": [ "data:read/*" ] + }, + "write": { + "application": "app01", + "name": "write", + "actions": [ "data:write/*" ] + } + }, + "app02": { + "user": { + "application": "app02", + "name": "user", + "actions": [ "action:login" ] + }, + "read": { + "application": "app02", + "name": "read", + "actions": [ "data:read/*" ] + }, + "write": { + "application": "app02", + "name": "write", + "actions": [ "data:write/*" ] + } + } + } + + # And a superuser + - do: + xpack.security.put_user: + username: "my_admin" + body: > + { + "password": "admin01", + "roles" : [ "superuser" ] + } + - do: + xpack.security.put_user: + username: "eng_write" + body: > + { + "password": "p@ssw0rd", + "roles" : [ "myapp_engineering_write" ] + } + +--- +teardown: + - do: + xpack.security.delete_privileges: + application: app01 + name: "user,read,write" + ignore: 404 + - do: + xpack.security.delete_privileges: + application: app02 + name: "user,read,write" + ignore: 404 + + - do: + xpack.security.delete_user: + username: "my_admin" + ignore: 404 + +--- +"Test superuser has all application-privileges": + - do: + headers: { Authorization: "Basic bXlfYWRtaW46YWRtaW4wMQ==" } # my_admin + xpack.security.has_privileges: + user: null + body: > + { + "cluster": [ "manage" ], + "index": [ + { + "names" :[ "*" ], + "privileges" : [ "read", "index", "write" ] + } + ], + "application": [ + { + "application" : "app01", + "resources" : [ "*" ], + "privileges" : [ "action:login", "data:read/secrets" ] + }, + { + "application" : "app02", + "resources" : [ "thing/1" ], + "privileges" : [ "data:write/thing" ] + } + ] + } + + - match: { "username" : "my_admin" } + - match: { "has_all_requested" : true } + - match: { "application" : { + "app01" : { + "*" : { + "action:login" : true, + "data:read/secrets" : true + } + }, + "app02" : { + "thing/1" : { + "data:write/thing" : true + } + } + } } + diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/roles/40_global_privileges.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/roles/40_global_privileges.yml new file mode 100644 index 0000000000000..b89efdfe56c33 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/roles/40_global_privileges.yml @@ -0,0 +1,51 @@ +--- +setup: + - skip: + features: headers + + - do: + cluster.health: + wait_for_status: yellow + - do: + xpack.security.put_user: + username: "joe" + body: > + { + "password": "s3krit", + "roles" : [ "with_global" ] + } + +--- +teardown: + - do: + xpack.security.delete_user: + username: "joe" + ignore: 404 + - do: + xpack.security.delete_role: + name: "with_global" + ignore: 404 + + +--- 
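The superuser test above drives the `has_privileges` API end to end. For orientation, here is a rough sketch (not part of the patch) of how a client application might issue the same check through the low-level REST client used elsewhere in this patch; the host, credentials, and payload are illustrative only:

```java
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class HasPrivilegesSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Same shape as the YAML test body: ask which application privileges
            // the calling user holds over the listed resources.
            Request request = new Request("GET", "/_xpack/security/user/_has_privileges");
            request.setJsonEntity("{"
                + " \"application\": [ {"
                + "   \"application\": \"myapp\","
                + "   \"resources\": [ \"engineering/logs/*\" ],"
                + "   \"privileges\": [ \"data:read/log/raw\" ]"
                + " } ]"
                + "}");
            // The response mirrors the assertions above: one boolean per
            // resource/privilege pair, plus a top-level has_all_requested flag.
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}
```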
+"Test put role with conditional security privileges": + - do: + xpack.security.put_role: + name: "with_global" + body: > + { + "global": { + "application": { + "manage": { + "applications": [ "app1-*" , "app2-*" ] + } + } + } + } + - match: { role: { created: true } } + + - do: + xpack.security.get_role: + name: "with_global" + - match: { with_global.global.application.manage.applications.0: "app1-*" } + - match: { with_global.global.application.manage.applications.1: "app2-*" } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/40_condtional_cluster_priv.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/40_condtional_cluster_priv.yml new file mode 100644 index 0000000000000..b3a1e22069083 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/40_condtional_cluster_priv.yml @@ -0,0 +1,132 @@ +--- +setup: + - skip: + features: headers + + - do: + cluster.health: + wait_for_status: yellow + + - do: + xpack.security.put_user: + username: "test_user" + body: > + { + "password" : "x-pack-test-password", + "roles" : [ "app_manage" ] + } + + - do: + xpack.security.put_role: + name: "app_manage" + body: > + { + "global": { + "application": { + "manage": { + "applications": [ "app" , "app-*" ] + } + } + } + } + + - do: + xpack.security.put_privilege: + application: app-allow + name: read + body: > + { + "actions": [ "data:read/*" ] + } + + - do: + xpack.security.put_privilege: + application: app_deny + name: read + body: > + { + "actions": [ "data:read/*" ] + } + +--- +teardown: + - do: + xpack.security.delete_user: + username: "test_user" + ignore: 404 + - do: + xpack.security.delete_role: + name: "app_manage" + ignore: 404 + + - do: + xpack.security.delete_privileges: + application: app + name: read + ignore: 404 + + - do: + xpack.security.delete_privileges: + application: app-allow + name: read + ignore: 404 + + - do: + xpack.security.delete_privileges: + application: app_deny + name: read + ignore: 404 + +--- +"Test put application privileges when allowed": + + - do: + headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user + xpack.security.put_privilege: + application: app + name: read + body: > + { + "actions": [ "data:read/*" ] + } + - match: { "app.read" : { created: true } } + +--- +"Test get application privileges when allowed": + - do: + headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user + xpack.security.get_privileges: + application: app-allow + name: read + + - match: { + "app-allow.read" : { + "application": "app-allow", + "name": "read", + "actions": [ "data:read/*" ], + "metadata": {} + } + } + +--- +"Test put application privileges when not allowed": + - do: + headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user + xpack.security.put_privilege: + application: app_deny + name: write + body: > + { + "actions": [ "data:write/*" ] + } + catch: forbidden + +--- +"Test get application privileges when not allowed": + - do: + headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user + xpack.security.get_privileges: + application: app_deny + name: read + catch: forbidden + diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ssl/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ssl/10_basic.yml index 7a87ef511e591..227d341b26d86 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ssl/10_basic.yml +++ 
b/x-pack/plugin/src/test/resources/rest-api-spec/test/ssl/10_basic.yml @@ -4,7 +4,6 @@ xpack.ssl.certificates: {} - length: { $body: 1 } - - match: { $body.0.path: "test-node.jks" } - - match: { $body.0.format: "jks" } - - match: { $body.0.alias: "test-node" } + - match: { $body.0.path: "testnode.crt" } + - match: { $body.0.format: "PEM" } - match: { $body.0.has_private_key: true } diff --git a/x-pack/plugin/upgrade/build.gradle b/x-pack/plugin/upgrade/build.gradle index 8e65f87da3070..f95cde7134c56 100644 --- a/x-pack/plugin/upgrade/build.gradle +++ b/x-pack/plugin/upgrade/build.gradle @@ -14,7 +14,7 @@ esplugin { archivesBaseName = 'x-pack-upgrade' dependencies { - compileOnly "org.elasticsearch.plugin:x-pack-core:${version}" + compileOnly project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } @@ -39,7 +39,7 @@ task internalClusterTest(type: RandomizedTestingTask, include '**/*IT.class' systemProperty 'es.set.netty.runtime.available.processors', 'false' } -check.dependsOn internalClusterTest +check.dependsOn internalClusterTest internalClusterTest.mustRunAfter test // also add an "alias" task to make typing on the command line easier diff --git a/x-pack/plugin/watcher/build.gradle b/x-pack/plugin/watcher/build.gradle index 2b7b73d37962e..a0feab6746359 100644 --- a/x-pack/plugin/watcher/build.gradle +++ b/x-pack/plugin/watcher/build.gradle @@ -25,7 +25,7 @@ dependencyLicenses { dependencies { compileOnly "org.elasticsearch:elasticsearch:${version}" - compileOnly "org.elasticsearch.plugin:x-pack-core:${version}" + compileOnly project(path: xpackModule('core'), configuration: 'shadow') compileOnly project(path: ':modules:transport-netty4', configuration: 'runtime') compileOnly project(path: ':plugins:transport-nio', configuration: 'runtime') diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/common/ProtocolUtils.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/common/ProtocolUtils.java new file mode 100644 index 0000000000000..e135cdc50e926 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/common/ProtocolUtils.java @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.protocol.xpack.common; + +import java.util.Arrays; +import java.util.Map; + +/** + * Common utilities used for XPack protocol classes + */ +public final class ProtocolUtils { + + /** + * Implements equals for a map of string arrays + * + * The map of string arrays is used in some XPack protocol classes but doesn't work with equals. + */
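As a side note on why this helper exists (illustration only, not part of the patch): `java.util.Map#equals` compares `String[]` values by reference, so two maps holding arrays with identical contents still compare as different:

```java
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.protocol.xpack.common.ProtocolUtils;

class ArrayValuedMapDemo {
    public static void main(String[] args) {
        Map<String, String[]> a = new HashMap<>();
        Map<String, String[]> b = new HashMap<>();
        a.put("feature", new String[] { "message" });
        b.put("feature", new String[] { "message" });
        // Arrays inherit Object.equals/hashCode, so Map.equals compares references:
        System.out.println(a.equals(b));                // false
        // ProtocolUtils compares the array contents element by element instead:
        System.out.println(ProtocolUtils.equals(a, b)); // true
    }
}
```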
+    public static boolean equals(Map<String, String[]> a, Map<String, String[]> b) { + if (a == null) { + return b == null; + } + if (b == null) { + return false; + } + if (a.size() != b.size()) { + return false; + } + for (Map.Entry<String, String[]> entry : a.entrySet()) { + String[] val = entry.getValue(); + String key = entry.getKey(); + if (val == null) { + if (b.get(key) != null || b.containsKey(key) == false) { + return false; + } + } else { + if (Arrays.equals(val, b.get(key)) == false) { + return false; + } + } + } + return true; + } + + /** + * Implements hashCode for map of string arrays + * + * The map of string arrays doesn't work with hashCode. + */ + public static int hashCode(Map<String, String[]> a) { + int hash = 0; + for (Map.Entry<String, String[]> entry : a.entrySet()) + hash += Arrays.hashCode(entry.getValue()); + return hash; + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/LicensesStatus.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/LicensesStatus.java new file mode 100644 index 0000000000000..2a3ed924fbe4b --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/LicensesStatus.java @@ -0,0 +1,68 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.license; + +import java.util.Locale; + +public enum LicensesStatus { + VALID((byte) 0), + INVALID((byte) 1), + EXPIRED((byte) 2); + + private final byte id; + + LicensesStatus(byte id) { + this.id = id; + } + + public int id() { + return id; + } + + public static LicensesStatus fromId(int id) { + if (id == 0) { + return VALID; + } else if (id == 1) { + return INVALID; + } else if (id == 2) { + return EXPIRED; + } else { + throw new IllegalStateException("no valid LicensesStatus for id=" + id); + } + } + + + @Override + public String toString() { + return this.name().toLowerCase(Locale.ROOT); + } + + public static LicensesStatus fromString(String value) { + switch (value) { + case "valid": + return VALID; + case "invalid": + return INVALID; + case "expired": + return EXPIRED; + default: + throw new IllegalArgumentException("unknown licenses status [" + value + "]"); + } + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseRequest.java new file mode 100644 index 0000000000000..97101a3ccd483 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseRequest.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership.
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; + +public class PutLicenseRequest extends AcknowledgedRequest<PutLicenseRequest> { + + private String licenseDefinition; + private boolean acknowledge = false; + + public PutLicenseRequest() { + + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public void setLicenseDefinition(String licenseDefinition) { + this.licenseDefinition = licenseDefinition; + } + + public String getLicenseDefinition() { + return licenseDefinition; + } + + public void setAcknowledge(boolean acknowledge) { + this.acknowledge = acknowledge; + } + + public boolean isAcknowledge() { + return acknowledge; + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponse.java new file mode 100644 index 0000000000000..9c4ff51d92a10 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponse.java @@ -0,0 +1,209 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */
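`PutLicenseRequest` above is a plain holder for the license JSON plus the acknowledge flag. A minimal usage sketch (illustrative only; the license string is elided and the helper name is hypothetical):

```java
import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;

class PutLicenseRequestSketch {
    static PutLicenseRequest build(String licenseJson) {
        PutLicenseRequest request = new PutLicenseRequest();
        request.setLicenseDefinition(licenseJson); // the signed license document
        request.setAcknowledge(true);              // accept feature downgrades up front
        assert request.validate() == null;         // the class performs no validation by design
        return request;
    }
}
```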
+ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParseException; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.common.ProtocolUtils; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class PutLicenseResponse extends AcknowledgedResponse { + + private static final ConstructingObjectParser<PutLicenseResponse, Void> PARSER = new ConstructingObjectParser<>( + "put_license_response", true, (a, v) -> { + boolean acknowledged = (Boolean) a[0]; + LicensesStatus licensesStatus = LicensesStatus.fromString((String) a[1]); + @SuppressWarnings("unchecked") Tuple<String, Map<String, String[]>> acknowledgements = (Tuple<String, Map<String, String[]>>) a[2]; + if (acknowledgements == null) { + return new PutLicenseResponse(acknowledged, licensesStatus); + } else { + return new PutLicenseResponse(acknowledged, licensesStatus, acknowledgements.v1(), acknowledgements.v2()); + } + + }); + + static { + PARSER.declareBoolean(constructorArg(), new ParseField("acknowledged")); + PARSER.declareString(constructorArg(), new ParseField("license_status")); + PARSER.declareObject(optionalConstructorArg(), (parser, v) -> { + Map<String, String[]> acknowledgeMessages = new HashMap<>(); + String message = null; + XContentParser.Token token; + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else { + if (currentFieldName == null) { + throw new XContentParseException(parser.getTokenLocation(), "expected message header or acknowledgement"); + } + if ("message".equals(currentFieldName)) { + if (token != XContentParser.Token.VALUE_STRING) { + throw new XContentParseException(parser.getTokenLocation(), "unexpected message header type"); + } + message = parser.text(); + } else { + if (token != XContentParser.Token.START_ARRAY) { + throw new XContentParseException(parser.getTokenLocation(), "unexpected acknowledgement type"); + } + List<String> acknowledgeMessagesList = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token != XContentParser.Token.VALUE_STRING) { + throw new XContentParseException(parser.getTokenLocation(), "unexpected acknowledgement text"); + } + acknowledgeMessagesList.add(parser.text()); + } + acknowledgeMessages.put(currentFieldName, acknowledgeMessagesList.toArray(new String[0])); + } + } + } + return new Tuple<>(message, acknowledgeMessages); + }, + new ParseField("acknowledge")); + } + + private LicensesStatus status; + private Map<String, String[]> acknowledgeMessages; + private String acknowledgeHeader; + + public PutLicenseResponse() { + } + + public PutLicenseResponse(boolean acknowledged, LicensesStatus status) { + this(acknowledged, status, null, Collections.emptyMap()); + }
+ + public PutLicenseResponse(boolean acknowledged, LicensesStatus status, String acknowledgeHeader, + Map<String, String[]> acknowledgeMessages) { + super(acknowledged); + this.status = status; + this.acknowledgeHeader = acknowledgeHeader; + this.acknowledgeMessages = acknowledgeMessages; + } + + public LicensesStatus status() { + return status; + } + + public Map<String, String[]> acknowledgeMessages() { + return acknowledgeMessages; + } + + public String acknowledgeHeader() { + return acknowledgeHeader; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + status = LicensesStatus.fromId(in.readVInt()); + acknowledgeHeader = in.readOptionalString(); + int size = in.readVInt(); + Map<String, String[]> acknowledgeMessages = new HashMap<>(size); + for (int i = 0; i < size; i++) { + String feature = in.readString(); + int nMessages = in.readVInt(); + String[] messages = new String[nMessages]; + for (int j = 0; j < nMessages; j++) { + messages[j] = in.readString(); + } + acknowledgeMessages.put(feature, messages); + } + this.acknowledgeMessages = acknowledgeMessages; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeVInt(status.id()); + out.writeOptionalString(acknowledgeHeader); + out.writeVInt(acknowledgeMessages.size()); + for (Map.Entry<String, String[]> entry : acknowledgeMessages.entrySet()) { + out.writeString(entry.getKey()); + out.writeVInt(entry.getValue().length); + for (String message : entry.getValue()) { + out.writeString(message); + } + } + } + + @Override + protected void addCustomFields(XContentBuilder builder, Params params) throws IOException { + builder.field("license_status", status.toString()); + if (!acknowledgeMessages.isEmpty()) { + builder.startObject("acknowledge"); + builder.field("message", acknowledgeHeader); + for (Map.Entry<String, String[]> entry : acknowledgeMessages.entrySet()) { + builder.startArray(entry.getKey()); + for (String message : entry.getValue()) { + builder.value(message); + } + builder.endArray(); + } + builder.endObject(); + } + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } + + public static PutLicenseResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + PutLicenseResponse that = (PutLicenseResponse) o; + + return status == that.status && + ProtocolUtils.equals(acknowledgeMessages, that.acknowledgeMessages) && + Objects.equals(acknowledgeHeader, that.acknowledgeHeader); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), status, ProtocolUtils.hashCode(acknowledgeMessages), acknowledgeHeader); + } + + +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/common/ProtocolUtilsTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/common/ProtocolUtilsTests.java new file mode 100644 index 0000000000000..214708327388f --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/common/ProtocolUtilsTests.java @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership.
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.protocol.xpack.common; + +import org.elasticsearch.test.ESTestCase; + +import java.util.HashMap; +import java.util.Map; + +public class ProtocolUtilsTests extends ESTestCase { + + public void testMapStringEqualsAndHash() { + assertTrue(ProtocolUtils.equals(null, null)); + assertFalse(ProtocolUtils.equals(null, new HashMap<>())); + assertFalse(ProtocolUtils.equals(new HashMap<>(), null)); + + Map<String, String[]> a = new HashMap<>(); + a.put("foo", new String[] { "a", "b" }); + a.put("bar", new String[] { "b", "c" }); + + Map<String, String[]> b = new HashMap<>(); + b.put("foo", new String[] { "a", "b" }); + + assertFalse(ProtocolUtils.equals(a, b)); + assertFalse(ProtocolUtils.equals(b, a)); + + b.put("bar", new String[] { "c", "b" }); + + assertFalse(ProtocolUtils.equals(a, b)); + assertFalse(ProtocolUtils.equals(b, a)); + + b.put("bar", new String[] { "b", "c" }); + + assertTrue(ProtocolUtils.equals(a, b)); + assertTrue(ProtocolUtils.equals(b, a)); + assertEquals(ProtocolUtils.hashCode(a), ProtocolUtils.hashCode(b)); + + b.put("baz", new String[] { "b", "c" }); + + assertFalse(ProtocolUtils.equals(a, b)); + assertFalse(ProtocolUtils.equals(b, a)); + + a.put("non", null); + + assertFalse(ProtocolUtils.equals(a, b)); + assertFalse(ProtocolUtils.equals(b, a)); + + b.put("non", null); + b.remove("baz"); + + assertTrue(ProtocolUtils.equals(a, b)); + assertTrue(ProtocolUtils.equals(b, a)); + assertEquals(ProtocolUtils.hashCode(a), ProtocolUtils.hashCode(b)); + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponseTests.java new file mode 100644 index 0000000000000..0980a07b68ad2 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponseTests.java @@ -0,0 +1,125 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */
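The tests above pin down the helper's contract. For context, here is a sketch (the `AckMessagesHolder` class is hypothetical) of the delegation pattern that `PutLicenseResponse` earlier in this patch follows when folding a `Map<String, String[]>` field into `equals` and `hashCode`:

```java
import java.util.Map;
import java.util.Objects;

import org.elasticsearch.protocol.xpack.common.ProtocolUtils;

class AckMessagesHolder {
    private final String header;
    private final Map<String, String[]> ackMessages;

    AckMessagesHolder(String header, Map<String, String[]> ackMessages) {
        this.header = header;
        this.ackMessages = ackMessages;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        AckMessagesHolder that = (AckMessagesHolder) o;
        // Delegate the array-valued map to ProtocolUtils instead of Map.equals.
        return Objects.equals(header, that.header)
            && ProtocolUtils.equals(ackMessages, that.ackMessages);
    }

    @Override
    public int hashCode() {
        return Objects.hash(header, ProtocolUtils.hashCode(ackMessages));
    }
}
```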
+package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Function; +import java.util.function.Predicate; + +public class PutLicenseResponseTests extends AbstractStreamableXContentTestCase<PutLicenseResponse> { + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate<String> getRandomFieldsExcludeFilter() { + // The structure of the response is such that unknown fields inside acknowledge cannot be supported since they + // are treated as messages from new services + return p -> p.startsWith("acknowledge"); + } + + @Override + protected PutLicenseResponse createTestInstance() { + boolean acknowledged = randomBoolean(); + LicensesStatus status = randomFrom(LicensesStatus.VALID, LicensesStatus.INVALID, LicensesStatus.EXPIRED); + String messageHeader; + Map<String, String[]> ackMessages; + if (randomBoolean()) { + messageHeader = randomAlphaOfLength(10); + ackMessages = randomAckMessages(); + } else { + messageHeader = null; + ackMessages = Collections.emptyMap(); + } + + return new PutLicenseResponse(acknowledged, status, messageHeader, ackMessages); + } + + private static Map<String, String[]> randomAckMessages() { + int nFeatures = randomIntBetween(1, 5); + + Map<String, String[]> ackMessages = new HashMap<>(); + + for (int i = 0; i < nFeatures; i++) { + String feature = randomAlphaOfLengthBetween(9, 15); + int nMessages = randomIntBetween(1, 5); + String[] messages = new String[nMessages]; + for (int j = 0; j < nMessages; j++) { + messages[j] = randomAlphaOfLengthBetween(10, 30); + } + ackMessages.put(feature, messages); + } + + return ackMessages; + } + + @Override + protected PutLicenseResponse doParseInstance(XContentParser parser) throws IOException { + return PutLicenseResponse.fromXContent(parser); + } + + @Override + protected PutLicenseResponse createBlankInstance() { + return new PutLicenseResponse(); + } + + @Override + protected PutLicenseResponse mutateInstance(PutLicenseResponse response) { + @SuppressWarnings("unchecked") + Function<PutLicenseResponse, PutLicenseResponse> mutator = randomFrom( + r -> new PutLicenseResponse( + r.isAcknowledged() == false, + r.status(), + r.acknowledgeHeader(), + r.acknowledgeMessages()), + r -> new PutLicenseResponse( + r.isAcknowledged(), + mutateStatus(r.status()), + r.acknowledgeHeader(), + r.acknowledgeMessages()), + r -> { + if (r.acknowledgeMessages().isEmpty()) { + return new PutLicenseResponse( + r.isAcknowledged(), + r.status(), + randomAlphaOfLength(10), + randomAckMessages() + ); + } else { + return new PutLicenseResponse(r.isAcknowledged(), r.status()); + } + } + + ); + return mutator.apply(response); + } + + private LicensesStatus mutateStatus(LicensesStatus status) { + return randomValueOtherThan(status, () -> randomFrom(LicensesStatus.values())); + } + +} diff --git a/x-pack/qa/core-rest-tests-with-security/src/test/resources/rest-api-spec/test/rankeval/10_rankeval.yml b/x-pack/qa/core-rest-tests-with-security/src/test/resources/rest-api-spec/test/rankeval/10_rankeval.yml index 6dae2bb2a6773..4720377909375 100644 --- a/x-pack/qa/core-rest-tests-with-security/src/test/resources/rest-api-spec/test/rankeval/10_rankeval.yml +++ b/x-pack/qa/core-rest-tests-with-security/src/test/resources/rest-api-spec/test/rankeval/10_rankeval.yml @@ -52,5 +52,5 @@ "metric" : { "precision": { "ignore_unlabeled" : true }} } - - match: { quality_level: 1 
} + - match: { metric_score: 1 } diff --git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle index 78ac1436fd8bc..3cf2970120675 100644 --- a/x-pack/qa/full-cluster-restart/build.gradle +++ b/x-pack/qa/full-cluster-restart/build.gradle @@ -11,7 +11,7 @@ apply plugin: 'elasticsearch.build' test.enabled = false dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile (project(path: xpackModule('security'), configuration: 'runtime')) { // Need to drop the guava dependency here or we get a conflict with watcher's guava dependency. // This is total #$%, but the solution is to get the SAML realm (which uses guava) out of security proper @@ -125,8 +125,8 @@ subprojects { String output = "${buildDir}/generated-resources/${project.name}" task copyTestNodeKeystore(type: Copy) { - from project(xpackModule('core')) - .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks') + from project(':x-pack:plugin:core') + .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks') into outputDir } @@ -249,7 +249,7 @@ subprojects { check.dependsOn(integTest) dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('watcher'), configuration: 'runtime') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') diff --git a/x-pack/qa/kerberos-tests/build.gradle b/x-pack/qa/kerberos-tests/build.gradle new file mode 100644 index 0000000000000..5caf5d6947e8c --- /dev/null +++ b/x-pack/qa/kerberos-tests/build.gradle @@ -0,0 +1,127 @@ +import java.nio.file.Path +import java.nio.file.Paths +import java.nio.file.Files + +apply plugin: 'elasticsearch.vagrantsupport' +apply plugin: 'elasticsearch.standalone-rest-test' +apply plugin: 'elasticsearch.rest-test' + +dependencies { + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') +} + +// MIT Kerberos Vagrant Testing Fixture +String box = "krb5kdc" +Map vagrantEnvVars = [ + 'VAGRANT_CWD' : "${project(':test:fixtures:krb5kdc-fixture').projectDir}", + 'VAGRANT_VAGRANTFILE' : 'Vagrantfile', + 'VAGRANT_PROJECT_DIR' : "${project(':test:fixtures:krb5kdc-fixture').projectDir}" +] + +task krb5kdcUpdate(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) { + command 'box' + subcommand 'update' + boxName box + environmentVars vagrantEnvVars + dependsOn "vagrantCheckVersion", "virtualboxCheckVersion" +} + +task krb5kdcFixture(type: org.elasticsearch.gradle.test.VagrantFixture) { + command 'up' + args '--provision', '--provider', 'virtualbox' + boxName box + environmentVars vagrantEnvVars + dependsOn krb5kdcUpdate +} + +task krb5AddPrincipals { dependsOn krb5kdcFixture } + +List principals = [ + "HTTP/localhost", + "peppa", + "george~dino" +] +String realm = "BUILD.ELASTIC.CO" + +for (String principal : principals) { + String[] princPwdPair = principal.split('~'); + String princName = princPwdPair[0]; + String password = ""; + if (princPwdPair.length > 1) { + password = princPwdPair[1]; + } + Task create = 
project.tasks.create("addPrincipal#${principal}".replace('/', '_'), org.elasticsearch.gradle.vagrant.VagrantCommandTask) { + command 'ssh' + args '--command', "sudo bash /vagrant/src/main/resources/provision/addprinc.sh $princName $password" + boxName box + environmentVars vagrantEnvVars + dependsOn krb5kdcFixture + } + krb5AddPrincipals.dependsOn(create) +} + +def generatedResources = "$buildDir/generated-resources/keytabs" +task copyKeytabToGeneratedResources(type: Copy) { + Path peppaKeytab = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("peppa.keytab").toAbsolutePath() + from peppaKeytab; + into generatedResources + dependsOn krb5AddPrincipals +} + +integTestCluster { + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.enabled', 'true' + setting 'xpack.security.authc.realms.file.type', 'file' + setting 'xpack.security.authc.realms.file.order', '0' + setting 'xpack.ml.enabled', 'false' + setting 'xpack.security.audit.enabled', 'true' + // Kerberos realm + setting 'xpack.security.authc.realms.kerberos.type', 'kerberos' + setting 'xpack.security.authc.realms.kerberos.order', '1' + setting 'xpack.security.authc.realms.kerberos.keytab.path', 'es.keytab' + setting 'xpack.security.authc.realms.kerberos.krb.debug', 'true' + setting 'xpack.security.authc.realms.kerberos.remove_realm_name', 'false' + + Path krb5conf = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf").toAbsolutePath() + String jvmArgsStr = " -Djava.security.krb5.conf=${krb5conf}" + " -Dsun.security.krb5.debug=true" + jvmArgs jvmArgsStr + Path esKeytab = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("HTTP_localhost.keytab").toAbsolutePath() + extraConfigFile("es.keytab", "${esKeytab}") + + setupCommand 'setupTestAdmin', + 'bin/elasticsearch-users', 'useradd', "test_admin", '-p', 'x-pack-test-password', '-r', "superuser" + + waitCondition = { node, ant -> + File tmpFile = new File(node.cwd, 'wait.success') + ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow", + dest: tmpFile.toString(), + username: 'test_admin', + password: 'x-pack-test-password', + ignoreerrors: true, + retries: 10) + return tmpFile.exists() + } + +} + +integTestRunner { + Path peppaKeytab = Paths.get("${project.buildDir}", "generated-resources", "keytabs", "peppa.keytab") + systemProperty 'test.userkt', "peppa@${realm}" + systemProperty 'test.userkt.keytab', "${peppaKeytab}" + systemProperty 'test.userpwd', "george@${realm}" + systemProperty 'test.userpwd.password', "dino" + systemProperty 'tests.security.manager', 'true' + Path krb5conf = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf").toAbsolutePath() + List jvmargs = ["-Djava.security.krb5.conf=${krb5conf}","-Dsun.security.krb5.debug=true"] + jvmArgs jvmargs +} + +if (project.rootProject.vagrantSupported == false) { + integTest.enabled = false +} else { + project.sourceSets.test.output.dir(generatedResources, builtBy: copyKeytabToGeneratedResources) + integTestCluster.dependsOn krb5AddPrincipals, krb5kdcFixture, copyKeytabToGeneratedResources + integTest.finalizedBy project(':test:fixtures:krb5kdc-fixture').halt +} diff --git a/x-pack/qa/kerberos-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java 
b/x-pack/qa/kerberos-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java new file mode 100644 index 0000000000000..d5928cb58f687 --- /dev/null +++ b/x-pack/qa/kerberos-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java @@ -0,0 +1,152 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.security.authc.kerberos; + +import org.apache.http.HttpEntity; +import org.apache.http.HttpHost; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.security.AccessControlContext; +import java.security.AccessController; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.util.List; +import java.util.Map; + +import javax.security.auth.login.LoginContext; + +import static org.elasticsearch.common.xcontent.XContentHelper.convertToMap; +import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +/** + * Integration test to demonstrate authentication against a real MIT Kerberos + * instance. + *

+ * Demonstrates login by keytab and login by password for a given user principal + * name using the REST client. + */ +public class KerberosAuthenticationIT extends ESRestTestCase { + private static final String ENABLE_KERBEROS_DEBUG_LOGS_KEY = "test.krb.debug"; + private static final String TEST_USER_WITH_KEYTAB_KEY = "test.userkt"; + private static final String TEST_USER_WITH_KEYTAB_PATH_KEY = "test.userkt.keytab"; + private static final String TEST_USER_WITH_PWD_KEY = "test.userpwd"; + private static final String TEST_USER_WITH_PWD_PASSWD_KEY = "test.userpwd.password"; + private static final String TEST_KERBEROS_REALM_NAME = "kerberos"; + + @Override + protected Settings restAdminSettings() { + final String token = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + /** + * Creates a simple mapping that maps users from the 'kerberos' realm to + * the 'kerb_test' role. + */ + @Before + public void setupRoleMapping() throws IOException { + final String json = Strings // top-level + .toString(XContentBuilder.builder(XContentType.JSON.xContent()).startObject() + .array("roles", new String[] { "kerb_test" }) + .field("enabled", true) + .startObject("rules") + .startArray("all") + .startObject().startObject("field").field("realm.name", TEST_KERBEROS_REALM_NAME).endObject().endObject() + .endArray() // "all" + .endObject() // "rules" + .endObject()); + + final Request request = new Request("POST", "/_xpack/security/role_mapping/kerberosrolemapping"); + request.setJsonEntity(json); + final Response response = adminClient().performRequest(request); + assertOK(response); + } + + public void testLoginByKeytab() throws IOException, PrivilegedActionException { + final String userPrincipalName = System.getProperty(TEST_USER_WITH_KEYTAB_KEY); + final String keytabPath = System.getProperty(TEST_USER_WITH_KEYTAB_PATH_KEY); + final boolean enabledDebugLogs = Boolean.parseBoolean(System.getProperty(ENABLE_KERBEROS_DEBUG_LOGS_KEY)); + final SpnegoHttpClientConfigCallbackHandler callbackHandler = new SpnegoHttpClientConfigCallbackHandler(userPrincipalName, + keytabPath, enabledDebugLogs); + executeRequestAndVerifyResponse(userPrincipalName, callbackHandler); + } + + public void testLoginByUsernamePassword() throws IOException, PrivilegedActionException { + final String userPrincipalName = System.getProperty(TEST_USER_WITH_PWD_KEY); + final String password = System.getProperty(TEST_USER_WITH_PWD_PASSWD_KEY); + final boolean enabledDebugLogs = Boolean.parseBoolean(System.getProperty(ENABLE_KERBEROS_DEBUG_LOGS_KEY)); + final SpnegoHttpClientConfigCallbackHandler callbackHandler = new SpnegoHttpClientConfigCallbackHandler(userPrincipalName, + new SecureString(password.toCharArray()), enabledDebugLogs); + executeRequestAndVerifyResponse(userPrincipalName, callbackHandler); + } + + private void executeRequestAndVerifyResponse(final String userPrincipalName, + final SpnegoHttpClientConfigCallbackHandler callbackHandler) throws PrivilegedActionException, IOException { + final Request request = new Request("GET", "/_xpack/security/_authenticate"); + try (RestClient restClient = buildRestClientForKerberos(callbackHandler)) { + final AccessControlContext accessControlContext = AccessController.getContext(); + final LoginContext lc = callbackHandler.login(); + Response response = SpnegoHttpClientConfigCallbackHandler.doAsPrivilegedWrapper(lc.getSubject(), + (PrivilegedExceptionAction<Response>) ()
-> { + return restClient.performRequest(request); + }, accessControlContext); + + assertOK(response); + final Map map = parseResponseAsMap(response.getEntity()); + assertThat(map.get("username"), equalTo(userPrincipalName)); + assertThat(map.get("roles"), instanceOf(List.class)); + assertThat(((List) map.get("roles")), contains("kerb_test")); + } + } + + private Map parseResponseAsMap(final HttpEntity entity) throws IOException { + return convertToMap(XContentType.JSON.xContent(), entity.getContent(), false); + } + + private RestClient buildRestClientForKerberos(final SpnegoHttpClientConfigCallbackHandler callbackHandler) throws IOException { + final Settings settings = restAdminSettings(); + final HttpHost[] hosts = getClusterHosts().toArray(new HttpHost[getClusterHosts().size()]); + + final RestClientBuilder restClientBuilder = RestClient.builder(hosts); + configureRestClientBuilder(restClientBuilder, settings); + restClientBuilder.setHttpClientConfigCallback(callbackHandler); + return restClientBuilder.build(); + } + + private static void configureRestClientBuilder(final RestClientBuilder restClientBuilder, final Settings settings) + throws IOException { + final String requestTimeoutString = settings.get(CLIENT_RETRY_TIMEOUT); + if (requestTimeoutString != null) { + final TimeValue maxRetryTimeout = TimeValue.parseTimeValue(requestTimeoutString, CLIENT_RETRY_TIMEOUT); + restClientBuilder.setMaxRetryTimeoutMillis(Math.toIntExact(maxRetryTimeout.getMillis())); + } + final String socketTimeoutString = settings.get(CLIENT_SOCKET_TIMEOUT); + if (socketTimeoutString != null) { + final TimeValue socketTimeout = TimeValue.parseTimeValue(socketTimeoutString, CLIENT_SOCKET_TIMEOUT); + restClientBuilder.setRequestConfigCallback(conf -> conf.setSocketTimeout(Math.toIntExact(socketTimeout.getMillis()))); + } + if (settings.hasValue(CLIENT_PATH_PREFIX)) { + restClientBuilder.setPathPrefix(settings.get(CLIENT_PATH_PREFIX)); + } + } +} diff --git a/x-pack/qa/kerberos-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoHttpClientConfigCallbackHandler.java b/x-pack/qa/kerberos-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoHttpClientConfigCallbackHandler.java new file mode 100644 index 0000000000000..e5768d8f2e944 --- /dev/null +++ b/x-pack/qa/kerberos-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SpnegoHttpClientConfigCallbackHandler.java @@ -0,0 +1,317 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
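Before the handler class itself, a note on how it is consumed (a sketch with an illustrative host): the integration test above wires the handler into the client through `RestClientBuilder.setHttpClientConfigCallback`, which is the only integration point it needs:

```java
import org.apache.http.HttpHost;
import org.elasticsearch.client.RestClient;

class KerberosClientSketch {
    static RestClient build(SpnegoHttpClientConfigCallbackHandler handler) {
        // The callback customizes the underlying Apache HttpAsyncClient with the
        // SPNEGO auth scheme and the Kerberos credentials before the client is built.
        return RestClient.builder(new HttpHost("localhost", 9200, "http"))
                .setHttpClientConfigCallback(handler)
                .build();
    }
}
```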
+ */ + +package org.elasticsearch.xpack.security.authc.kerberos; + +import org.apache.http.auth.AuthSchemeProvider; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.Credentials; +import org.apache.http.auth.KerberosCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.client.config.AuthSchemes; +import org.apache.http.config.Lookup; +import org.apache.http.config.RegistryBuilder; +import org.apache.http.impl.auth.SPNegoSchemeFactory; +import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.client.RestClientBuilder.HttpClientConfigCallback; +import org.elasticsearch.common.settings.SecureString; +import org.ietf.jgss.GSSCredential; +import org.ietf.jgss.GSSException; +import org.ietf.jgss.GSSManager; +import org.ietf.jgss.GSSName; +import org.ietf.jgss.Oid; + +import java.io.IOException; +import java.security.AccessControlContext; +import java.security.AccessController; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import javax.security.auth.Subject; +import javax.security.auth.callback.Callback; +import javax.security.auth.callback.CallbackHandler; +import javax.security.auth.callback.PasswordCallback; +import javax.security.auth.callback.UnsupportedCallbackException; +import javax.security.auth.kerberos.KerberosPrincipal; +import javax.security.auth.login.AppConfigurationEntry; +import javax.security.auth.login.Configuration; +import javax.security.auth.login.LoginContext; + +/** + * This class implements {@link HttpClientConfigCallback} which allows for + * customization of {@link HttpAsyncClientBuilder}. + *

+ * Based on the configuration, configures {@link HttpAsyncClientBuilder} to + * support the SPNEGO auth scheme. + * It uses the configured credentials, either password or keytab, for authentication. + */
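Viewed in isolation, the scheme registration this class performs amounts to the following (the same HttpComponents calls that appear in `setupSpnegoAuthSchemeSupport` below, shown standalone as a sketch):

```java
import org.apache.http.auth.AuthSchemeProvider;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.config.Lookup;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.impl.auth.SPNegoSchemeFactory;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;

class SpnegoRegistrySketch {
    static void registerSpnego(HttpAsyncClientBuilder builder) {
        // Advertise SPNEGO as an auth scheme the async HTTP client may negotiate.
        Lookup<AuthSchemeProvider> registry = RegistryBuilder.<AuthSchemeProvider>create()
                .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory())
                .build();
        builder.setDefaultAuthSchemeRegistry(registry);
    }
}
```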
+public class SpnegoHttpClientConfigCallbackHandler implements HttpClientConfigCallback { + private static final String SUN_KRB5_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule"; + private static final String CRED_CONF_NAME = "ESClientLoginConf"; + private static final Oid SPNEGO_OID = getSpnegoOid(); + + private static Oid getSpnegoOid() { + Oid oid = null; + try { + oid = new Oid("1.3.6.1.5.5.2"); + } catch (GSSException gsse) { + throw ExceptionsHelper.convertToRuntime(gsse); + } + return oid; + } + + private final String userPrincipalName; + private final SecureString password; + private final String keytabPath; + private final boolean enableDebugLogs; + private LoginContext loginContext; + + /** + * Constructs {@link SpnegoHttpClientConfigCallbackHandler} with given + * principalName and password. + * + * @param userPrincipalName user principal name + * @param password password for user + * @param enableDebugLogs if {@code true} enables kerberos debug logs + */ + public SpnegoHttpClientConfigCallbackHandler(final String userPrincipalName, final SecureString password, + final boolean enableDebugLogs) { + this.userPrincipalName = userPrincipalName; + this.password = password; + this.keytabPath = null; + this.enableDebugLogs = enableDebugLogs; + } + + /** + * Constructs {@link SpnegoHttpClientConfigCallbackHandler} with given + * principalName and keytab. + * + * @param userPrincipalName User principal name + * @param keytabPath path to keytab file for user + * @param enableDebugLogs if {@code true} enables kerberos debug logs + */ + public SpnegoHttpClientConfigCallbackHandler(final String userPrincipalName, final String keytabPath, final boolean enableDebugLogs) { + this.userPrincipalName = userPrincipalName; + this.keytabPath = keytabPath; + this.password = null; + this.enableDebugLogs = enableDebugLogs; + } + + @Override + public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) { + setupSpnegoAuthSchemeSupport(httpClientBuilder); + return httpClientBuilder; + } + + private void setupSpnegoAuthSchemeSupport(HttpAsyncClientBuilder httpClientBuilder) { + final Lookup<AuthSchemeProvider> authSchemeRegistry = RegistryBuilder.<AuthSchemeProvider>create() + .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory()).build(); + + final GSSManager gssManager = GSSManager.getInstance(); + try { + final GSSName gssUserPrincipalName = gssManager.createName(userPrincipalName, GSSName.NT_USER_NAME); + login(); + final AccessControlContext acc = AccessController.getContext(); + final GSSCredential credential = doAsPrivilegedWrapper(loginContext.getSubject(), + (PrivilegedExceptionAction<GSSCredential>) () -> gssManager.createCredential(gssUserPrincipalName, + GSSCredential.DEFAULT_LIFETIME, SPNEGO_OID, GSSCredential.INITIATE_ONLY), + acc); + + final KerberosCredentialsProvider credentialsProvider = new KerberosCredentialsProvider(); + credentialsProvider.setCredentials( + new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM, AuthSchemes.SPNEGO), + new KerberosCredentials(credential)); + httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); + } catch (GSSException e) { + throw new RuntimeException(e); + } catch (PrivilegedActionException e) { + throw new RuntimeException(e.getCause()); + } + httpClientBuilder.setDefaultAuthSchemeRegistry(authSchemeRegistry); + } + + /** + * If a logged-in {@link LoginContext} is not available, it attempts login and + * returns the {@link LoginContext} + *
+ * @return {@link LoginContext} + * @throws PrivilegedActionException if the login fails + */ + public synchronized LoginContext login() throws PrivilegedActionException { + if (this.loginContext == null) { + AccessController.doPrivileged((PrivilegedExceptionAction<Void>) () -> { + final Subject subject = new Subject(false, Collections.singleton(new KerberosPrincipal(userPrincipalName)), + Collections.emptySet(), Collections.emptySet()); + Configuration conf = null; + final CallbackHandler callback; + if (password != null) { + conf = new PasswordJaasConf(userPrincipalName, enableDebugLogs); + callback = new KrbCallbackHandler(userPrincipalName, password); + } else { + conf = new KeytabJaasConf(userPrincipalName, keytabPath, enableDebugLogs); + callback = null; + } + loginContext = new LoginContext(CRED_CONF_NAME, subject, callback, conf); + loginContext.login(); + return null; + }); + } + return loginContext; + } + + /** + * Privileged wrapper that invokes the action with Subject.doAs to perform work as the + * given subject. + * + * @param subject {@link Subject} to be used for this work + * @param action {@link PrivilegedExceptionAction} action for performing inside + * Subject.doAs + * @param acc the {@link AccessControlContext} to be tied to the specified + * subject and action, see + * {@link Subject#doAsPrivileged(Subject, PrivilegedExceptionAction, AccessControlContext)} + * @return the value returned by the PrivilegedExceptionAction's run method + * @throws PrivilegedActionException if the action's run method threw a checked exception + */ + static <T> T doAsPrivilegedWrapper(final Subject subject, final PrivilegedExceptionAction<T> action, final AccessControlContext acc) + throws PrivilegedActionException { + try { + return AccessController.doPrivileged((PrivilegedExceptionAction<T>) () -> Subject.doAsPrivileged(subject, action, acc)); + } catch (PrivilegedActionException pae) { + if (pae.getCause() instanceof PrivilegedActionException) { + throw (PrivilegedActionException) pae.getCause(); + } + throw pae; + } + } + + /** + * This class matches {@link AuthScope} and based on that returns + * {@link Credentials}. Only supports {@link AuthSchemes#SPNEGO} in + * {@link AuthScope#getScheme()} + */ + private static class KerberosCredentialsProvider implements CredentialsProvider { + private AuthScope authScope; + private Credentials credentials; + + @Override + public void setCredentials(AuthScope authscope, Credentials credentials) { + if (authscope.getScheme().regionMatches(true, 0, AuthSchemes.SPNEGO, 0, AuthSchemes.SPNEGO.length()) == false) { + throw new IllegalArgumentException("Only " + AuthSchemes.SPNEGO + " auth scheme is supported in AuthScope"); + } + this.authScope = authscope; + this.credentials = credentials; + } + + @Override + public Credentials getCredentials(AuthScope authscope) { + assert this.authScope != null && authscope != null; + return authscope.match(this.authScope) > -1 ? this.credentials : null; + } + + @Override + public void clear() { + this.authScope = null; + this.credentials = null; + } + }
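A usage sketch for `doAsPrivilegedWrapper` above (the action shown is illustrative, and the sketch assumes a caller in the same package, since the wrapper is package-private): callers log in once, then run privileged work as the resulting subject while the wrapper unwraps nested `PrivilegedActionException`s:

```java
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;

import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;

class DoAsSketch {
    static String principalName(LoginContext loginContext) throws PrivilegedActionException {
        final AccessControlContext acc = AccessController.getContext();
        final Subject subject = loginContext.getSubject();
        // Run as the Kerberos-authenticated subject; assumes at least one principal is present.
        return SpnegoHttpClientConfigCallbackHandler.doAsPrivilegedWrapper(
                subject,
                (PrivilegedExceptionAction<String>) () -> subject.getPrincipals().iterator().next().getName(),
                acc);
    }
}
```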
+ + /** + * JAAS callback handler to provide credentials. + */ + private static class KrbCallbackHandler implements CallbackHandler { + private final String principal; + private final SecureString password; + + KrbCallbackHandler(final String principal, final SecureString password) { + this.principal = principal; + this.password = password; + } + + public void handle(final Callback[] callbacks) throws IOException, UnsupportedCallbackException { + for (Callback callback : callbacks) { + if (callback instanceof PasswordCallback) { + PasswordCallback pc = (PasswordCallback) callback; + if (pc.getPrompt().contains(principal)) { + pc.setPassword(password.getChars()); + break; + } + } + } + } + } + + /** + * Usually we would have a JAAS configuration file for login configuration. + * Instead of an additional file, and because we do not want the options to be + * customizable, we construct the configuration in memory. + *

+ * As we are using this instead of jaas.conf, this requires a refresh of + * {@link Configuration} and requires appropriate security permissions to do so. + */ + private static class PasswordJaasConf extends AbstractJaasConf { + + PasswordJaasConf(final String userPrincipalName, final boolean enableDebugLogs) { + super(userPrincipalName, enableDebugLogs); + } + + public void addOptions(final Map<String, String> options) { + options.put("useTicketCache", Boolean.FALSE.toString()); + options.put("useKeyTab", Boolean.FALSE.toString()); + } + } + + /** + * Usually we would have a JAAS configuration file for login configuration. As + * we have static configuration except for the debug flag, we construct it in + * memory. This avoids additional configuration required from the user. + *

+ * As we are using this instead of jaas.conf, this requires a refresh of + * {@link Configuration} and requires appropriate security permissions to do so. + */ + private static class KeytabJaasConf extends AbstractJaasConf { + private final String keytabFilePath; + + KeytabJaasConf(final String userPrincipalName, final String keytabFilePath, final boolean enableDebugLogs) { + super(userPrincipalName, enableDebugLogs); + this.keytabFilePath = keytabFilePath; + } + + public void addOptions(final Map<String, String> options) { + options.put("useKeyTab", Boolean.TRUE.toString()); + options.put("keyTab", keytabFilePath); + options.put("doNotPrompt", Boolean.TRUE.toString()); + } + + } + + private abstract static class AbstractJaasConf extends Configuration { + private final String userPrincipalName; + private final boolean enableDebugLogs; + + AbstractJaasConf(final String userPrincipalName, final boolean enableDebugLogs) { + this.userPrincipalName = userPrincipalName; + this.enableDebugLogs = enableDebugLogs; + } + + @Override + public AppConfigurationEntry[] getAppConfigurationEntry(final String name) { + final Map<String, String> options = new HashMap<>(); + options.put("principal", userPrincipalName); + options.put("isInitiator", Boolean.TRUE.toString()); + options.put("storeKey", Boolean.TRUE.toString()); + options.put("debug", Boolean.toString(enableDebugLogs)); + addOptions(options); + return new AppConfigurationEntry[] { new AppConfigurationEntry(SUN_KRB5_LOGIN_MODULE, + AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, Collections.unmodifiableMap(options)) }; + } + + abstract void addOptions(Map<String, String> options); + } +} diff --git a/x-pack/qa/kerberos-tests/src/test/resources/plugin-security.policy b/x-pack/qa/kerberos-tests/src/test/resources/plugin-security.policy new file mode 100644 index 0000000000000..fb7936bf62093 --- /dev/null +++ b/x-pack/qa/kerberos-tests/src/test/resources/plugin-security.policy @@ -0,0 +1,4 @@ +grant { + permission javax.security.auth.AuthPermission "doAsPrivileged"; + permission javax.security.auth.kerberos.DelegationPermission "\"HTTP/localhost@BUILD.ELASTIC.CO\" \"krbtgt/BUILD.ELASTIC.CO@BUILD.ELASTIC.CO\""; +}; \ No newline at end of file diff --git a/x-pack/qa/ml-basic-multi-node/build.gradle b/x-pack/qa/ml-basic-multi-node/build.gradle index d396d38b22354..3df77aadccbd5 100644 --- a/x-pack/qa/ml-basic-multi-node/build.gradle +++ b/x-pack/qa/ml-basic-multi-node/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('ml'), configuration: 'runtime') } diff --git a/x-pack/qa/ml-disabled/build.gradle b/x-pack/qa/ml-disabled/build.gradle index 22a7dfc74ac73..e914def3507cd 100644 --- a/x-pack/qa/ml-disabled/build.gradle +++ b/x-pack/qa/ml-disabled/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('ml'), configuration: 'runtime') } diff --git a/x-pack/qa/ml-native-multi-node-tests/build.gradle b/x-pack/qa/ml-native-multi-node-tests/build.gradle index 286d4daee8aa5..b1893b20c465b 100644 --- a/x-pack/qa/ml-native-multi-node-tests/build.gradle +++
diff --git a/x-pack/qa/kerberos-tests/src/test/resources/plugin-security.policy b/x-pack/qa/kerberos-tests/src/test/resources/plugin-security.policy
new file mode 100644
index 0000000000000..fb7936bf62093
--- /dev/null
+++ b/x-pack/qa/kerberos-tests/src/test/resources/plugin-security.policy
@@ -0,0 +1,4 @@
+grant {
+    permission javax.security.auth.AuthPermission "doAsPrivileged";
+    permission javax.security.auth.kerberos.DelegationPermission "\"HTTP/localhost@BUILD.ELASTIC.CO\" \"krbtgt/BUILD.ELASTIC.CO@BUILD.ELASTIC.CO\"";
+};
\ No newline at end of file
diff --git a/x-pack/qa/ml-basic-multi-node/build.gradle b/x-pack/qa/ml-basic-multi-node/build.gradle
index d396d38b22354..3df77aadccbd5 100644
--- a/x-pack/qa/ml-basic-multi-node/build.gradle
+++ b/x-pack/qa/ml-basic-multi-node/build.gradle
@@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'
 
 dependencies {
-  testCompile project(path: xpackModule('core'), configuration: 'runtime')
+  testCompile project(path: xpackModule('core'), configuration: 'shadow')
   testCompile project(path: xpackModule('ml'), configuration: 'runtime')
 }
 
diff --git a/x-pack/qa/ml-disabled/build.gradle b/x-pack/qa/ml-disabled/build.gradle
index 22a7dfc74ac73..e914def3507cd 100644
--- a/x-pack/qa/ml-disabled/build.gradle
+++ b/x-pack/qa/ml-disabled/build.gradle
@@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'
 
 dependencies {
-  testCompile project(path: xpackModule('core'), configuration: 'runtime')
+  testCompile project(path: xpackModule('core'), configuration: 'shadow')
   testCompile project(path: xpackModule('ml'), configuration: 'runtime')
 }
 
diff --git a/x-pack/qa/ml-native-multi-node-tests/build.gradle b/x-pack/qa/ml-native-multi-node-tests/build.gradle
index 286d4daee8aa5..b1893b20c465b 100644
--- a/x-pack/qa/ml-native-multi-node-tests/build.gradle
+++ b/x-pack/qa/ml-native-multi-node-tests/build.gradle
@@ -4,7 +4,7 @@ apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'
 
 dependencies {
-  testCompile project(path: xpackModule('core'), configuration: 'runtime')
+  testCompile project(path: xpackModule('core'), configuration: 'shadow')
   testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
   testCompile project(path: xpackModule('ml'), configuration: 'runtime')
   testCompile project(path: xpackModule('ml'), configuration: 'testArtifacts')
@@ -18,59 +18,45 @@ integTestRunner {
   systemProperty 'es.set.netty.runtime.available.processors', 'false'
 }
 
-// location of generated keystores and certificates
+// location for keys and certificates
 File keystoreDir = new File(project.buildDir, 'keystore')
-
-// Generate the node's keystore
-File nodeKeystore = new File(keystoreDir, 'test-node.jks')
-task createNodeKeyStore(type: LoggedExec) {
-  doFirst {
-    if (nodeKeystore.parentFile.exists() == false) {
-      nodeKeystore.parentFile.mkdirs()
-    }
-    if (nodeKeystore.exists()) {
-      delete nodeKeystore
-    }
+File nodeKey = file("$keystoreDir/testnode.pem")
+File nodeCert = file("$keystoreDir/testnode.crt")
+// Add key and certs to test classpath: it expects it there
+task copyKeyCerts(type: Copy) {
+  from(project(':x-pack:plugin:core').file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/')) {
+    include 'testnode.crt', 'testnode.pem'
   }
-  executable = new File(project.runtimeJavaHome, 'bin/keytool')
-  standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8'))
-  args '-genkey',
-       '-alias', 'test-node',
-       '-keystore', nodeKeystore,
-       '-keyalg', 'RSA',
-       '-keysize', '2048',
-       '-validity', '712',
-       '-dname', 'CN=smoke-test-plugins-ssl',
-       '-keypass', 'keypass',
-       '-storepass', 'keypass'
+  into keystoreDir
 }
-
-// Add keystores to test classpath: it expects it there
+// Add keys and certs to test classpath: it expects it there
 sourceSets.test.resources.srcDir(keystoreDir)
-processTestResources.dependsOn(createNodeKeyStore)
+processTestResources.dependsOn(copyKeyCerts)
 
 integTestCluster {
-  dependsOn createNodeKeyStore
+  dependsOn copyKeyCerts
   setting 'xpack.security.enabled', 'true'
   setting 'xpack.ml.enabled', 'true'
   setting 'logger.org.elasticsearch.xpack.ml.datafeed', 'TRACE'
   setting 'xpack.monitoring.enabled', 'false'
   setting 'xpack.security.authc.token.enabled', 'true'
   setting 'xpack.security.transport.ssl.enabled', 'true'
-  setting 'xpack.security.transport.ssl.keystore.path', nodeKeystore.name
+  setting 'xpack.security.transport.ssl.key', nodeKey.name
+  setting 'xpack.security.transport.ssl.certificate', nodeCert.name
   setting 'xpack.security.transport.ssl.verification_mode', 'certificate'
   setting 'xpack.security.audit.enabled', 'true'
   setting 'xpack.license.self_generated.type', 'trial'
   keystoreSetting 'bootstrap.password', 'x-pack-test-password'
-  keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass'
+  keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode'
   numNodes = 3
   setupCommand 'setupDummyUser', 'bin/elasticsearch-users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser'
-  extraConfigFile nodeKeystore.name, nodeKeystore
+  extraConfigFile nodeKey.name, nodeKey
+  extraConfigFile nodeCert.name, nodeCert
   waitCondition = { node, ant ->
     File tmpFile = new File(node.cwd, 'wait.success')
diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java
index c9956e6092fc5..74a026ed5adb4 100644
--- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java
+++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java
@@ -124,9 +124,11 @@ protected Collection<Class<? extends Plugin>> transportClientPlugins() {
 
     @Override
     protected Settings externalClusterClientSettings() {
-        Path keyStore;
+        Path key;
+        Path certificate;
         try {
-            keyStore = PathUtils.get(getClass().getResource("/test-node.jks").toURI());
+            key = PathUtils.get(getClass().getResource("/testnode.pem").toURI());
+            certificate = PathUtils.get(getClass().getResource("/testnode.crt").toURI());
         } catch (URISyntaxException e) {
             throw new IllegalStateException("error trying to get keystore path", e);
         }
@@ -135,8 +137,9 @@ protected Settings externalClusterClientSettings() {
         builder.put(SecurityField.USER_SETTING.getKey(), "x_pack_rest_user:" + SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING);
         builder.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), true);
         builder.put("xpack.security.transport.ssl.enabled", true);
-        builder.put("xpack.security.transport.ssl.keystore.path", keyStore.toAbsolutePath().toString());
-        builder.put("xpack.security.transport.ssl.keystore.password", "keypass");
+        builder.put("xpack.security.transport.ssl.key", key.toAbsolutePath().toString());
+        builder.put("xpack.security.transport.ssl.certificate", certificate.toAbsolutePath().toString());
+        builder.put("xpack.security.transport.ssl.key_passphrase", "testnode");
         builder.put("xpack.security.transport.ssl.verification_mode", "certificate");
         return builder.build();
     }
diff --git a/x-pack/qa/ml-no-bootstrap-tests/build.gradle b/x-pack/qa/ml-no-bootstrap-tests/build.gradle
index cad5201a67b6a..7e252afa3022e 100644
--- a/x-pack/qa/ml-no-bootstrap-tests/build.gradle
+++ b/x-pack/qa/ml-no-bootstrap-tests/build.gradle
@@ -1,7 +1,6 @@
 apply plugin: 'elasticsearch.standalone-test'
 
 dependencies {
-  testCompile project(path: xpackModule('core'), configuration: 'runtime')
+  testCompile project(path: xpackModule('core'), configuration: 'shadow')
   testCompile project(path: xpackModule('ml'), configuration: 'runtime')
 }
-
diff --git a/x-pack/qa/ml-single-node-tests/build.gradle b/x-pack/qa/ml-single-node-tests/build.gradle
index 9fd4a8d44d23f..b62e37894b3c3 100644
--- a/x-pack/qa/ml-single-node-tests/build.gradle
+++ b/x-pack/qa/ml-single-node-tests/build.gradle
@@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'
 
 dependencies {
-  testCompile project(path: xpackModule('core'), configuration: 'runtime')
+  testCompile project(path: xpackModule('core'), configuration: 'shadow')
   testCompile project(path: xpackModule('ml'), configuration: 'runtime')
 }
 
diff --git a/x-pack/qa/multi-cluster-search-security/build.gradle b/x-pack/qa/multi-cluster-search-security/build.gradle
index f5265466965c1..5d90f974762bc 100644
--- a/x-pack/qa/multi-cluster-search-security/build.gradle
+++ b/x-pack/qa/multi-cluster-search-security/build.gradle
@@ -3,7 +3,7 @@ import org.elasticsearch.gradle.test.RestIntegTestTask
 
 apply plugin: 'elasticsearch.standalone-test'
 
 dependencies {
-  testCompile project(path: xpackModule('core'), configuration: 'runtime')
+  testCompile project(path: xpackModule('core'), configuration: 'shadow')
   testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
 }
 
diff --git a/x-pack/qa/multi-node/build.gradle b/x-pack/qa/multi-node/build.gradle
index 69c7a696ff0be..19729cf367ef5 100644
--- a/x-pack/qa/multi-node/build.gradle
+++ b/x-pack/qa/multi-node/build.gradle
@@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'
 
 dependencies {
-  testCompile project(path: xpackModule('core'), configuration: 'runtime')
+  testCompile project(path: xpackModule('core'), configuration: 'shadow')
 }
 
 integTestCluster {
diff --git a/x-pack/qa/openldap-tests/build.gradle b/x-pack/qa/openldap-tests/build.gradle
index 5c0399a1d9fa2..24cd6184afa63 100644
--- a/x-pack/qa/openldap-tests/build.gradle
+++ b/x-pack/qa/openldap-tests/build.gradle
@@ -5,7 +5,7 @@ apply plugin: 'elasticsearch.standalone-test'
 apply plugin: 'elasticsearch.vagrantsupport'
 
 dependencies {
-  testCompile project(path: xpackModule('core'), configuration: 'runtime')
+  testCompile project(path: xpackModule('core'), configuration: 'shadow')
   testCompile project(path: xpackModule('security'), configuration: 'testArtifacts')
   testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
 }
@@ -32,4 +32,3 @@ namingConventions {
   // integ tests use Tests instead of IT
   skipIntegTestInDisguise = true
 }
-
diff --git a/x-pack/qa/reindex-tests-with-security/build.gradle b/x-pack/qa/reindex-tests-with-security/build.gradle
index ddf72f7d45833..097d343b27984 100644
--- a/x-pack/qa/reindex-tests-with-security/build.gradle
+++ b/x-pack/qa/reindex-tests-with-security/build.gradle
@@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'
 
 dependencies {
-  testCompile project(path: xpackModule('core'), configuration: 'runtime')
+  testCompile project(path: xpackModule('core'), configuration: 'shadow')
   testCompile project(path: xpackModule('security'), configuration: 'testArtifacts')
   testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
   testCompile project(path: ':modules:reindex')
diff --git a/x-pack/qa/rolling-upgrade-basic/build.gradle b/x-pack/qa/rolling-upgrade-basic/build.gradle
index 6d5b250b460b7..21ac4414d86b2 100644
--- a/x-pack/qa/rolling-upgrade-basic/build.gradle
+++ b/x-pack/qa/rolling-upgrade-basic/build.gradle
@@ -7,7 +7,7 @@ import java.nio.charset.StandardCharsets
 apply plugin: 'elasticsearch.standalone-test'
 
 dependencies {
-  testCompile project(path: xpackModule('core'), configuration: 'runtime')
+  testCompile project(path: xpackModule('core'), configuration: 'shadow')
   testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') // to be moved in a later commit
 }
 
diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle
index 351f33b941227..b983caa866937 100644
--- a/x-pack/qa/rolling-upgrade/build.gradle
+++ b/x-pack/qa/rolling-upgrade/build.gradle
@@ -10,7 +10,7 @@ apply plugin: 'elasticsearch.build'
 test.enabled = false
 
 dependencies {
-  testCompile project(path: xpackModule('core'), configuration: 'runtime')
+  testCompile project(path: xpackModule('core'), configuration: 'shadow')
   testCompile project(path: xpackModule('security'), configuration: 'runtime')
   testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') // to be moved in a later commit
 }
@@ -107,7 +107,7 @@ subprojects {
   String outputDir =
"${buildDir}/generated-resources/${project.name}" task copyTestNodeKeystore(type: Copy) { - from project(xpackModule('core')) + from project(':x-pack:plugin:core') .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks') into outputDir } @@ -284,7 +284,7 @@ subprojects { check.dependsOn(integTest) dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('watcher')) } diff --git a/x-pack/qa/saml-idp-tests/build.gradle b/x-pack/qa/saml-idp-tests/build.gradle index ebb0d88991ff6..752ec6fb3071b 100644 --- a/x-pack/qa/saml-idp-tests/build.gradle +++ b/x-pack/qa/saml-idp-tests/build.gradle @@ -6,7 +6,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile 'com.google.jimfs:jimfs:1.1' @@ -84,4 +84,3 @@ thirdPartyAudit.excludes = [ // missing 'com.ibm.icu.lang.UCharacter' ] - diff --git a/x-pack/qa/security-client-tests/build.gradle b/x-pack/qa/security-client-tests/build.gradle index 4e517f4d3633e..97945fb00efcd 100644 --- a/x-pack/qa/security-client-tests/build.gradle +++ b/x-pack/qa/security-client-tests/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } diff --git a/x-pack/qa/security-example-spi-extension/build.gradle b/x-pack/qa/security-example-spi-extension/build.gradle index b2fac075cb315..7aeed3ad62de6 100644 --- a/x-pack/qa/security-example-spi-extension/build.gradle +++ b/x-pack/qa/security-example-spi-extension/build.gradle @@ -8,7 +8,7 @@ esplugin { } dependencies { - compileOnly project(path: xpackModule('core'), configuration: 'runtime') + compileOnly project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } diff --git a/x-pack/qa/security-migrate-tests/build.gradle b/x-pack/qa/security-migrate-tests/build.gradle index 7ccf6d2349b8b..3a8a0cf100554 100644 --- a/x-pack/qa/security-migrate-tests/build.gradle +++ b/x-pack/qa/security-migrate-tests/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('security'), configuration: 'runtime') testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } diff --git a/x-pack/qa/security-setup-password-tests/build.gradle b/x-pack/qa/security-setup-password-tests/build.gradle index 16accc87a9476..adb159acf6f6b 100644 --- a/x-pack/qa/security-setup-password-tests/build.gradle +++ 
b/x-pack/qa/security-setup-password-tests/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('security'), configuration: 'runtime') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/qa/smoke-test-graph-with-security/build.gradle b/x-pack/qa/smoke-test-graph-with-security/build.gradle index d3f788d0b06e4..9cdfaffccfbce 100644 --- a/x-pack/qa/smoke-test-graph-with-security/build.gradle +++ b/x-pack/qa/smoke-test-graph-with-security/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') } // bring in graph rest test suite diff --git a/x-pack/qa/smoke-test-ml-with-security/build.gradle b/x-pack/qa/smoke-test-ml-with-security/build.gradle index 2a12aa2f28d3f..84c23add25411 100644 --- a/x-pack/qa/smoke-test-ml-with-security/build.gradle +++ b/x-pack/qa/smoke-test-ml-with-security/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackProject('plugin').path, configuration: 'testArtifacts') } diff --git a/x-pack/qa/smoke-test-monitoring-with-watcher/build.gradle b/x-pack/qa/smoke-test-monitoring-with-watcher/build.gradle index 4e079430562a7..8ce0cde76575a 100644 --- a/x-pack/qa/smoke-test-monitoring-with-watcher/build.gradle +++ b/x-pack/qa/smoke-test-monitoring-with-watcher/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core')) + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('watcher')) testCompile project(path: xpackModule('monitoring')) } @@ -24,4 +24,3 @@ integTestCluster { // one of the exporters should configure cluster alerts // setting 'xpack.monitoring.exporters.my_http.cluster_alerts.management.enabled', 'true' } - diff --git a/x-pack/qa/smoke-test-plugins-ssl/build.gradle b/x-pack/qa/smoke-test-plugins-ssl/build.gradle index 595c562af3707..53533bd9b87f3 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/build.gradle +++ b/x-pack/qa/smoke-test-plugins-ssl/build.gradle @@ -4,7 +4,7 @@ import org.elasticsearch.gradle.plugin.PluginBuildPlugin import org.elasticsearch.gradle.test.NodeInfo import javax.net.ssl.HttpsURLConnection -import javax.net.ssl.KeyManagerFactory +import javax.net.ssl.KeyManager import javax.net.ssl.SSLContext import javax.net.ssl.TrustManagerFactory import java.nio.charset.StandardCharsets @@ -15,7 +15,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') } String outputDir = 
"${buildDir}/generated-resources/${project.name}" @@ -26,135 +26,27 @@ task copyXPackPluginProps(type: Copy) { } project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps) -// needed to be consistent with ssl host checking -Object san = new SanEvaluator() - // location of generated keystores and certificates File keystoreDir = new File(project.buildDir, 'keystore') +File nodeKeystore = file("$keystoreDir/testnode.jks") +File nodeKey = file("$keystoreDir/testnode.pem") +File nodeCert = file("$keystoreDir/testnode.crt") +File clientKeyStore = file("$keystoreDir/testclient.jks") +File clientKey = file("$keystoreDir/testclient.pem") +File clientCert = file("$keystoreDir/testclient.crt") -// Generate the node's keystore -File nodeKeystore = new File(keystoreDir, 'test-node.jks') -task createNodeKeyStore(type: LoggedExec) { - doFirst { - if (nodeKeystore.parentFile.exists() == false) { - nodeKeystore.parentFile.mkdirs() - } - if (nodeKeystore.exists()) { - delete nodeKeystore - } - } - executable = new File(project.runtimeJavaHome, 'bin/keytool') - standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8')) - args '-genkey', - '-alias', 'test-node', - '-keystore', nodeKeystore, - '-keyalg', 'RSA', - '-keysize', '2048', - '-validity', '712', - '-dname', 'CN=smoke-test-plugins-ssl', - '-keypass', 'keypass', - '-storepass', 'keypass', - '-ext', san -} - -// Generate the client's keystore -File clientKeyStore = new File(keystoreDir, 'test-client.jks') -task createClientKeyStore(type: LoggedExec) { - doFirst { - if (clientKeyStore.parentFile.exists() == false) { - clientKeyStore.parentFile.mkdirs() - } - if (clientKeyStore.exists()) { - delete clientKeyStore - } - } - executable = new File(project.runtimeJavaHome, 'bin/keytool') - standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8')) - args '-genkey', - '-alias', 'test-client', - '-keystore', clientKeyStore, - '-keyalg', 'RSA', - '-keysize', '2048', - '-validity', '712', - '-dname', 'CN=smoke-test-plugins-ssl', - '-keypass', 'keypass', - '-storepass', 'keypass', - '-ext', san -} - -// Export the node's certificate -File nodeCertificate = new File(keystoreDir, 'test-node.cert') -task exportNodeCertificate(type: LoggedExec) { - dependsOn createNodeKeyStore - doFirst { - if (nodeCertificate.parentFile.exists() == false) { - nodeCertificate.parentFile.mkdirs() - } - if (nodeCertificate.exists()) { - delete nodeCertificate - } - } - executable = new File(project.runtimeJavaHome, 'bin/keytool') - args '-export', - '-alias', 'test-node', - '-keystore', nodeKeystore, - '-storepass', 'keypass', - '-file', nodeCertificate -} - -// Import the node certificate in the client's keystore -task importNodeCertificateInClientKeyStore(type: LoggedExec) { - dependsOn createClientKeyStore, exportNodeCertificate - executable = new File(project.runtimeJavaHome, 'bin/keytool') - args '-import', - '-alias', 'test-node', - '-keystore', clientKeyStore, - '-storepass', 'keypass', - '-file', nodeCertificate, - '-noprompt' -} - -// Export the client's certificate -File clientCertificate = new File(keystoreDir, 'test-client.cert') -task exportClientCertificate(type: LoggedExec) { - dependsOn createClientKeyStore - doFirst { - if (clientCertificate.parentFile.exists() == false) { - clientCertificate.parentFile.mkdirs() - } - if (clientCertificate.exists()) { - delete clientCertificate - } +// Add keystores to test classpath: 
it expects it there +task copyKeyCerts(type: Copy) { + from('./') { + include '*.crt', '*.pem', '*.jks' } - executable = new File(project.runtimeJavaHome, 'bin/keytool') - args '-export', - '-alias', 'test-client', - '-keystore', clientKeyStore, - '-storepass', 'keypass', - '-file', clientCertificate -} - -// Import the client certificate in the node's keystore -task importClientCertificateInNodeKeyStore(type: LoggedExec) { - dependsOn createNodeKeyStore, exportClientCertificate - executable = new File(project.runtimeJavaHome, 'bin/keytool') - args '-import', - '-alias', 'test-client', - '-keystore', nodeKeystore, - '-storepass', 'keypass', - '-file', clientCertificate, - '-noprompt' + into keystoreDir } - -forbiddenPatterns { - exclude '**/*.cert' -} - // Add keystores to test classpath: it expects it there sourceSets.test.resources.srcDir(keystoreDir) -processTestResources.dependsOn(importNodeCertificateInClientKeyStore, importClientCertificateInNodeKeyStore) +processTestResources.dependsOn(copyKeyCerts) -integTestCluster.dependsOn(importClientCertificateInNodeKeyStore, importNodeCertificateInClientKeyStore) +integTestCluster.dependsOn(copyKeyCerts) ext.pluginsCount = 0 project(':plugins').getChildProjects().each { pluginName, pluginProject -> @@ -167,8 +59,7 @@ integTestCluster { setting 'xpack.monitoring.collection.interval', '1s' setting 'xpack.monitoring.exporters._http.type', 'http' setting 'xpack.monitoring.exporters._http.enabled', 'false' - setting 'xpack.monitoring.exporters._http.ssl.truststore.path', clientKeyStore.name - setting 'xpack.monitoring.exporters._http.ssl.truststore.password', 'keypass' + setting 'xpack.ssl.certificate_authorities', 'testnode.crt' setting 'xpack.monitoring.exporters._http.auth.username', 'monitoring_agent' setting 'xpack.monitoring.exporters._http.auth.password', 'x-pack-test-password' setting 'xpack.monitoring.exporters._http.ssl.verification_mode', 'full' @@ -176,14 +67,18 @@ integTestCluster { setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' setting 'xpack.security.http.ssl.enabled', 'true' - setting 'xpack.security.http.ssl.keystore.path', nodeKeystore.name - keystoreSetting 'xpack.security.http.ssl.keystore.secure_password', 'keypass' + setting 'xpack.security.http.ssl.key', 'testnode.pem' + setting 'xpack.security.http.ssl.certificate', 'testnode.crt' + keystoreSetting 'xpack.security.http.ssl.secure_key_passphrase', 'testnode' setting 'xpack.ml.enabled', 'false' - - // copy keystores into config/ + // copy keystores, keys and certificates into config/ extraConfigFile nodeKeystore.name, nodeKeystore + extraConfigFile nodeKey.name, nodeKey + extraConfigFile nodeCert.name, nodeCert extraConfigFile clientKeyStore.name, clientKeyStore + extraConfigFile clientKey.name, clientKey + extraConfigFile clientCert.name, clientCert setupCommand 'setupTestUser', 'bin/elasticsearch-users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' @@ -193,13 +88,12 @@ integTestCluster { waitCondition = { NodeInfo node, AntBuilder ant -> File tmpFile = new File(node.cwd, 'wait.success') KeyStore keyStore = KeyStore.getInstance("JKS"); - keyStore.load(clientKeyStore.newInputStream(), 'keypass'.toCharArray()); - KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); - kmf.init(keyStore, 'keypass'.toCharArray()); + keyStore.load(clientKeyStore.newInputStream(), 'testclient'.toCharArray()); TrustManagerFactory tmf = 
TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
     tmf.init(keyStore);
+    // We don't need a KeyManager as there won't be client auth required, so pass an empty array
     SSLContext sslContext = SSLContext.getInstance("TLSv1.2");
-    sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom());
+    sslContext.init(new KeyManager[0], tmf.getTrustManagers(), new SecureRandom());
     for (int i = 0; i < 10; i++) {
       // we use custom wait logic here for HTTPS
       HttpsURLConnection httpURLConnection = null;
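The wait logic above only needs to trust the server's certificate, never to present one of its own. A self-contained sketch of the same trust-only pattern (the path and password below are the test fixtures used elsewhere in this change; treat them as placeholders):

```java
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.KeyStore;
import java.security.SecureRandom;

import javax.net.ssl.KeyManager;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;

public class TrustOnlySslContext {

    /** Builds an SSLContext that trusts the given JKS store and presents no client certificate. */
    static SSLContext build(Path trustStorePath, char[] storePassword) throws Exception {
        KeyStore trustStore = KeyStore.getInstance("JKS");
        try (InputStream in = Files.newInputStream(trustStorePath)) {
            trustStore.load(in, storePassword);
        }
        TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
        tmf.init(trustStore);
        SSLContext ctx = SSLContext.getInstance("TLSv1.2");
        // Empty KeyManager array: we only verify the server, we never authenticate ourselves.
        ctx.init(new KeyManager[0], tmf.getTrustManagers(), new SecureRandom());
        return ctx;
    }

    public static void main(String[] args) throws Exception {
        SSLContext ctx = build(Paths.get("build/keystore/testclient.jks"), "testclient".toCharArray());
        System.out.println(ctx.getProtocol()); // TLSv1.2
    }
}
```

Passing `null` for the key managers would also work, but the explicit empty array makes the "no client auth" decision visible at the call site.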
@@ -244,160 +138,4 @@ processTestResources {
   inputs.properties(expansions)
   MavenFilteringHack.filter(it, expansions)
 }
-}
-
-/** A lazy evaluator to find the san to use for certificate generation. */
-class SanEvaluator {
-
-    private static String san = null
-
-    String toString() {
-        synchronized (SanEvaluator.class) {
-            if (san == null) {
-                san = getSubjectAlternativeNameString()
-            }
-        }
-        return san
-    }
-
-    // Code stolen from NetworkUtils/InetAddresses/NetworkAddress to support SAN
-    /** Return all interfaces (and subinterfaces) on the system */
-    private static List<NetworkInterface> getInterfaces() throws SocketException {
-        List<NetworkInterface> all = new ArrayList<>();
-        addAllInterfaces(all, Collections.list(NetworkInterface.getNetworkInterfaces()));
-        Collections.sort(all, new Comparator<NetworkInterface>() {
-            @Override
-            public int compare(NetworkInterface left, NetworkInterface right) {
-                return Integer.compare(left.getIndex(), right.getIndex());
-            }
-        });
-        return all;
-    }
-
-    /** Helper for getInterfaces, recursively adds subinterfaces to {@code target} */
-    private static void addAllInterfaces(List<NetworkInterface> target, List<NetworkInterface> level) {
-        if (!level.isEmpty()) {
-            target.addAll(level);
-            for (NetworkInterface intf : level) {
-                addAllInterfaces(target, Collections.list(intf.getSubInterfaces()));
-            }
-        }
-    }
-
-    private static String getSubjectAlternativeNameString() {
-        List<InetAddress> list = new ArrayList<>();
-        for (NetworkInterface intf : getInterfaces()) {
-            if (intf.isUp()) {
-                // NOTE: some operating systems (e.g. BSD stack) assign a link local address to the loopback interface
-                // while technically not a loopback address, some of these treat them as one (e.g. OS X "localhost") so we must too,
-                // otherwise things just won't work out of box. So we include all addresses from loopback interfaces.
-                for (InetAddress address : Collections.list(intf.getInetAddresses())) {
-                    if (intf.isLoopback() || address.isLoopbackAddress()) {
-                        list.add(address);
-                    }
-                }
-            }
-        }
-        if (list.isEmpty()) {
-            throw new IllegalArgumentException("no up-and-running loopback addresses found, got " + getInterfaces());
-        }
-
-        StringBuilder builder = new StringBuilder("san=");
-        for (int i = 0; i < list.size(); i++) {
-            InetAddress address = list.get(i);
-            String hostAddress;
-            if (address instanceof Inet6Address) {
-                hostAddress = compressedIPV6Address((Inet6Address)address);
-            } else {
-                hostAddress = address.getHostAddress();
-            }
-            builder.append("ip:").append(hostAddress);
-            String hostname = address.getHostName();
-            if (hostname.equals(address.getHostAddress()) == false) {
-                builder.append(",dns:").append(hostname);
-            }
-
-            if (i != (list.size() - 1)) {
-                builder.append(",");
-            }
-        }
-
-        return builder.toString();
-    }
-
-    private static String compressedIPV6Address(Inet6Address inet6Address) {
-        byte[] bytes = inet6Address.getAddress();
-        int[] hextets = new int[8];
-        for (int i = 0; i < hextets.length; i++) {
-            hextets[i] = (bytes[2 * i] & 255) << 8 | bytes[2 * i + 1] & 255;
-        }
-        compressLongestRunOfZeroes(hextets);
-        return hextetsToIPv6String(hextets);
-    }
-
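Since the hextet math in `compressedIPV6Address` is easy to misread, here is a hypothetical standalone demo (not part of this change) of what it computes for a concrete address:

```java
import java.net.Inet6Address;
import java.net.InetAddress;
import java.util.Arrays;

public class HextetDemo {
    public static void main(String[] args) throws Exception {
        Inet6Address addr = (Inet6Address) InetAddress.getByName("2001:db8:0:0:0:0:0:1");
        byte[] bytes = addr.getAddress();
        int[] hextets = new int[8];
        for (int i = 0; i < hextets.length; i++) {
            // two consecutive bytes -> one unsigned 16-bit hextet
            hextets[i] = (bytes[2 * i] & 255) << 8 | bytes[2 * i + 1] & 255;
        }
        // prints [8193, 3512, 0, 0, 0, 0, 0, 1], i.e. 0x2001, 0xdb8, five zeros, 1;
        // collapsing the zero run (the job of the methods below) gives "2001:db8::1"
        System.out.println(Arrays.toString(hextets));
    }
}
```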
-    /**
-     * Identify and mark the longest run of zeroes in an IPv6 address.
-     *
-     * <p>Only runs of two or more hextets are considered. In case of a tie, the
-     * leftmost run wins. If a qualifying run is found, its hextets are replaced
-     * by the sentinel value -1.
-     *
-     * @param hextets {@code int[]} mutable array of eight 16-bit hextets
-     */
-    private static void compressLongestRunOfZeroes(int[] hextets) {
-        int bestRunStart = -1;
-        int bestRunLength = -1;
-        int runStart = -1;
-        for (int i = 0; i < hextets.length + 1; i++) {
-            if (i < hextets.length && hextets[i] == 0) {
-                if (runStart < 0) {
-                    runStart = i;
-                }
-            } else if (runStart >= 0) {
-                int runLength = i - runStart;
-                if (runLength > bestRunLength) {
-                    bestRunStart = runStart;
-                    bestRunLength = runLength;
-                }
-                runStart = -1;
-            }
-        }
-        if (bestRunLength >= 2) {
-            Arrays.fill(hextets, bestRunStart, bestRunStart + bestRunLength, -1);
-        }
-    }
-
-    /**
-     * Convert a list of hextets into a human-readable IPv6 address.
-     *
-     * <p>
    In order for "::" compression to work, the input should contain negative - * sentinel values in place of the elided zeroes. - * - * @param hextets {@code int[]} array of eight 16-bit hextets, or -1s - */ - private static String hextetsToIPv6String(int[] hextets) { - /* - * While scanning the array, handle these state transitions: - * start->num => "num" start->gap => "::" - * num->num => ":num" num->gap => "::" - * gap->num => "num" gap->gap => "" - */ - StringBuilder buf = new StringBuilder(39); - boolean lastWasNumber = false; - for (int i = 0; i < hextets.length; i++) { - boolean thisIsNumber = hextets[i] >= 0; - if (thisIsNumber) { - if (lastWasNumber) { - buf.append(':'); - } - buf.append(Integer.toHexString(hextets[i])); - } else { - if (i == 0 || lastWasNumber) { - buf.append("::"); - } - } - lastWasNumber = thisIsNumber; - } - return buf.toString(); - } -} +} \ No newline at end of file diff --git a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsSslClientYamlTestSuiteIT.java b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsSslClientYamlTestSuiteIT.java index 8411a7eb5a4e7..9cb45472ed39c 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsSslClientYamlTestSuiteIT.java +++ b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsSslClientYamlTestSuiteIT.java @@ -29,7 +29,7 @@ public class SmokeTestPluginsSslClientYamlTestSuiteIT extends ESClientYamlSuiteT private static final String USER = "test_user"; private static final String PASS = "x-pack-test-password"; - private static final String KEYSTORE_PASS = "keypass"; + private static final String KEYSTORE_PASS = "testnode"; public SmokeTestPluginsSslClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); @@ -45,7 +45,7 @@ public static Iterable parameters() throws Exception { @BeforeClass public static void getKeyStore() { try { - keyStore = PathUtils.get(SmokeTestPluginsSslClientYamlTestSuiteIT.class.getResource("/test-node.jks").toURI()); + keyStore = PathUtils.get(SmokeTestPluginsSslClientYamlTestSuiteIT.class.getResource("/testnode.jks").toURI()); } catch (URISyntaxException e) { throw new ElasticsearchException("exception while reading the store", e); } diff --git a/x-pack/qa/smoke-test-plugins-ssl/testclient.crt b/x-pack/qa/smoke-test-plugins-ssl/testclient.crt new file mode 100644 index 0000000000000..18221208c162e --- /dev/null +++ b/x-pack/qa/smoke-test-plugins-ssl/testclient.crt @@ -0,0 +1,23 @@ +-----BEGIN CERTIFICATE----- +MIID1zCCAr+gAwIBAgIJALnUl/KSS74pMA0GCSqGSIb3DQEBCwUAMEoxDDAKBgNV +BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEiMCAGA1UEAxMZRWxhc3Rp +Y3NlYXJjaCBUZXN0IENsaWVudDAeFw0xNTA5MjMxODUyNTVaFw0xOTA5MjIxODUy +NTVaMEoxDDAKBgNVBAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEiMCAG +A1UEAxMZRWxhc3RpY3NlYXJjaCBUZXN0IENsaWVudDCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAMKm+P6vDAff0c6BWKGdhnYoNl9HijLIgfU3d9CQcqKt +wT+yUW3DPSVjIfaLmDIGj6Hl8jTHWPB7ZP4fzhrPi6m4qlRGclJMECBuNASZFiPD +tEDv3msoeqOKQet6n7PZvgpWM7hxYZO4P1aMKJtRsFAdvBAdZUnv0spR5G4UZTHz +SKmMeanIKFkLaD0XVKiLQu9/z9M6roDQeAEoCJ/8JsanG8ih2ymfPHIZuNyYIOrV +ekHN2zU6bnVn8/PCeZSjS6h5xYw+Jl5gzGI/n+F5CZ+THoH8pM4pGp6xRVzpiH12 +gvERGwgSIDXdn/+uZZj+4lE7n2ENRSOt5KcOGG99r60CAwEAAaOBvzCBvDAJBgNV +HRMEAjAAMB0GA1UdDgQWBBSSFhBXNp7AaNrHdlgCV0mCEzt7ajCBjwYDVR0RBIGH +MIGEgglsb2NhbGhvc3SCFWxvY2FsaG9zdC5sb2NhbGRvbWFpboIKbG9jYWxob3N0 
+NIIXbG9jYWxob3N0NC5sb2NhbGRvbWFpbjSCCmxvY2FsaG9zdDaCF2xvY2FsaG9z +dDYubG9jYWxkb21haW42hwR/AAABhxAAAAAAAAAAAAAAAAAAAAABMA0GCSqGSIb3 +DQEBCwUAA4IBAQANvAkddfLxn4/BCY4LY/1ET3d7ZRldjFTyjjHRYJ3CYBXWVahM +skLxIcFNca8YjKfXoX8mcK+NQK/dAbGHXqk76yMlkrKjh1OQiZ1YAX5ryYerGrZ9 +9N3E9wnbn72bW3iumoLlqmTWlHEpMI0Ql6J75BQLTgKHxCPupVA5sTbWkKwGjXXA +i84rUlzhDJOR8jk3/7ct0iZO8Hk6AWMcNix5Wka3IDGUXuEVevYRlxgVyCxcnZWC +7JWREpar5aIPQFkY6VCEglxwUyXbHZw5T/u6XaKKnS7gz8RiwRh68ddSQJeEHi5e +4onUD7bOCJgfsiUwdiCkDbfN9Yum8OIpmBRs +-----END CERTIFICATE----- diff --git a/x-pack/qa/smoke-test-plugins-ssl/testclient.jks b/x-pack/qa/smoke-test-plugins-ssl/testclient.jks new file mode 100644 index 0000000000000..d6dc21c1bd5ff Binary files /dev/null and b/x-pack/qa/smoke-test-plugins-ssl/testclient.jks differ diff --git a/x-pack/qa/smoke-test-plugins-ssl/testclient.pem b/x-pack/qa/smoke-test-plugins-ssl/testclient.pem new file mode 100644 index 0000000000000..7268c55dba977 --- /dev/null +++ b/x-pack/qa/smoke-test-plugins-ssl/testclient.pem @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,C98A45E4AFC263C2 + +wLuUEXldYc54r4ryWd6jw6UMGYwn6+ibGKHp4sD92l42lmI2UrCT/Mb/E0O+KMMy +pHgc5/dBWkXgMiqDyLIhHk4kgT40rdw5W5lZkAA4Qt/Yzd+rbscTvzp09zrF6Fll +czgoE7FrvhOKiEOakerTit4pIPYosdX606cpVQE2lq9oZs9HVMcLzdAZj8A/P/4g +fo4X3+zqVYC/LH4n00bhNoeeej2o1lEJ+l9u9hptT2ATXle6pANa83Ldg4OxJyj8 +dkR9ahnAMCvYTSjEU7nwmGNPeFX0PIUjJKQivr410cYG104DC30Yy+XrIUfjTVUi +agwlMpHoBq79/ZRUJR3xPLkIGgw4g+RPt45D9eKsEsV4vqy8SFlgaoJ2mKUKleZy +i7D9ouzMKQ3sYE4eQVQ5o3K8ZPn5eozCwCVIp7jGSsuvDpLA9peZSwWPfc5y8JFD +/64usCt1J8Mv/e9NVllC8ZA+ZmDitTiwLZysczpMOaFqqeUbk9EJst38n4nBzRV2 +quxvg9W/iveQIydFyftCtNfRkpbp0NCsLz293dBYwZacHsPcY27IBCwXHiICjiAW +q7bnisXsgSaQMhMNRGW9YElZGb7ZWxoIzcyNBisGI8zxn48ObERVOmkOFxY/gs9T +YmpVMliWtmRG6hb6iCh9b7z8THRquxgTGE9ZFBwtLUKg33aubtgAfnUh/Xq2Ue5K +l+ZCqDGEi/FSIjVENUNNntAx/vXeNPbkoGLb/HSJwAh+sjpaLGQ54xixCtE9l3NY +o2QAiZ804KLPaGtbbOv7wPumxQ+8mxG5FN0hTRrsMW9t8pBXw47iMy/T2H21TD5D +E5XbM6kFeBrnsWnZJ2/ieXqDE4SX0tm3WEvZlDg7N7jV8QDM/D3Xdkb/sqJRabMG +tQRgwkLiB+mZ5MAfGLogI2/lOEayrBVz4qYdXojewxY4LtaZ5HiUIlyA9CJelMvD +nS52I6+FpaFhvuZC10qaM9Ph9TNyx+XKRUsPILuDiBRnYiHUKs1qASl5tjn2yyjM +71WSo7A7btOckzhDZdMVf1T472f0LGsRYoQebMhotqCuR7yArZHzTeWB0CjL3tOz +j3QlhKt2E1jx43bSK5tBasd9Bpmn2onvdwu1RRP8cyQBsXJSDy4/8t/g63+C3wod +8VPrlKhK+TenK9EoEqJ2mNuNq+duOjTXfK/7GM5s0BFKv+i2ckpDi1NPckd2gXjF +yUFZhmK6k0WC4jjWloMt+WQpi1rXMEXwCypgTrqWbvD0p6+X3uQmP57L4yHQcZoW +Qcs5GnihJ0DIhw9vYDhBhNo0WY1oBO20nVCN3R/JIpp3uDtg64WvfvMSXzJIPBCY +s+/GM5TtuD6mERDu3+qXxWwiy4PMQRcgjRTMEZ3A4Iv77YfQRkcd6S9qjUUuR/5D +xs+J4ryb1biz9ofW7I+Dbz4SArWSgwcuh14AV9RBv6Rh9m83rjT2K0yvbe/+7hHW +R8nzRMqJcGNGCHmRjA/cwoiv6+k2J/RbCJqnR3RmNex/85XaXBfZwRfHXVbzZQfa +SrFaaNLf1hMwGLAJjIcQRxa3yZbjFXVx1Bp4hh8rKNWaOItjavNtNg== +-----END RSA PRIVATE KEY----- diff --git a/x-pack/qa/smoke-test-plugins-ssl/testnode.crt b/x-pack/qa/smoke-test-plugins-ssl/testnode.crt new file mode 100644 index 0000000000000..08c160bcea5ff --- /dev/null +++ b/x-pack/qa/smoke-test-plugins-ssl/testnode.crt @@ -0,0 +1,23 @@ +-----BEGIN CERTIFICATE----- +MIID0zCCArugAwIBAgIJALi5bDfjMszLMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV +BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEgMB4GA1UEAxMXRWxhc3Rp +Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTUwOTIzMTg1MjU3WhcNMTkwOTIyMTg1MjU3 +WjBIMQwwCgYDVQQKEwNvcmcxFjAUBgNVBAsTDWVsYXN0aWNzZWFyY2gxIDAeBgNV +BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEA3rGZ1QbsW0+MuyrSLmMfDFKtLBkIFW8V0gRuurFg1PUKKNR1 +Mq2tMVwjjYETAU/UY0iKZOzjgvYPKhDTYBTte/WHR1ZK4CYVv7TQX/gtFQG/ge/c 
+7u0sLch9p7fbd+/HZiLS/rBEZDIohvgUvzvnA8+OIYnw4kuxKo/5iboAIS41klMg +/lATm8V71LMY68inht71/ZkQoAHKgcR9z4yNYvQ1WqKG8DG8KROXltll3sTrKbl5 +zJhn660es/1ZnR6nvwt6xnSTl/mNHMjkfv1bs4rJ/py3qPxicdoSIn/KyojUcgHV +F38fuAy2CQTdjVG5fWj9iz+mQvLm3+qsIYQdFwIDAQABo4G/MIG8MAkGA1UdEwQC +MAAwHQYDVR0OBBYEFEMMWLWQi/g83PzlHYqAVnty5L7HMIGPBgNVHREEgYcwgYSC +CWxvY2FsaG9zdIIVbG9jYWxob3N0LmxvY2FsZG9tYWluggpsb2NhbGhvc3Q0ghds +b2NhbGhvc3Q0LmxvY2FsZG9tYWluNIIKbG9jYWxob3N0NoIXbG9jYWxob3N0Ni5s +b2NhbGRvbWFpbjaHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAEwDQYJKoZIhvcNAQEL +BQADggEBAMjGGXT8Nt1tbl2GkiKtmiuGE2Ej66YuZ37WSJViaRNDVHLlg87TCcHe +k2rdO+6sFqQbbzEfwQ05T7xGmVu7tm54HwKMRugoQ3wct0bQC5wEWYN+oMDvSyO6 +M28mZwWb4VtR2IRyWP+ve5DHwTM9mxWa6rBlGzsQqH6YkJpZojzqk/mQTug+Y8aE +mVoqRIPMHq9ob+S9qd5lp09+MtYpwPfTPx/NN+xMEooXWW/ARfpGhWPkg/FuCu4z +1tFmCqHgNcWirzMm3dQpF78muE9ng6OB2MXQwL4VgnVkxmlZNHbkR2v/t8MyZJxC +y4g6cTMM3S/UMt5/+aIB2JAuMKyuD+A= +-----END CERTIFICATE----- diff --git a/x-pack/qa/smoke-test-plugins-ssl/testnode.jks b/x-pack/qa/smoke-test-plugins-ssl/testnode.jks new file mode 100644 index 0000000000000..ebe6146124e8f Binary files /dev/null and b/x-pack/qa/smoke-test-plugins-ssl/testnode.jks differ diff --git a/x-pack/qa/smoke-test-plugins-ssl/testnode.pem b/x-pack/qa/smoke-test-plugins-ssl/testnode.pem new file mode 100644 index 0000000000000..5a67e1033440d --- /dev/null +++ b/x-pack/qa/smoke-test-plugins-ssl/testnode.pem @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,9D867F7E0C94D013 + +dVoVCjPeg1wgS7rVtOvGfQcrZyLkx393aWRnFq45tbjKBVuITtJ9vI7o4QXOV/15 +Gnb6WhXGIdWrzsxEAd46K6hIuNSISd4Emsx6c2Q5hTqWXXfexbOZBNfTtXtdJPnJ +1jAaikhtztLo3JSLTKNY5sNxd+XbaQyYVUWvueK6zOaIIMETvB+VPVFd9i1ROibk +Sgdtyj01KjkoalifqK/tA0CIYNKL0S6/eoK3UhAlpIprlpV+cnXa940C6bjLeJPt +PMAGGp5RrplxSgrSerw3I9DOWkHGtpqzIka3XneNUXJP8k4HUJ+aZkGH2ZILKS8d +4KMIb+KZSpHEGn+6uGccWLtZZmAjWJrDw56JbQtSHdRYLBRSOjLbTvQoPu/2Hpli +7HOxbotlvjptMunncq5aqK57SHA1dh0cwF7J3LUmGFJ67eoz+VV3b5qMn4MopSeI +mS16Ydd3nGpjSrln/elM0CQxqWfcOAXRZpDpFUQoXcBrLVzvz2DBl/0CrTRLhgzi +CO+5/IVcBWRlYpRNGgjjP7q0j6URID3jk5J06fYQXmBiwQT5j+GZqqzpMCJ9mIy2 +1O9SN1hebJnIcEU+E0njn/MGjlYdPywhaCy8pqElp6Q8TUEJpwLRFO/owCoBet/n +ZmCXUjfCGhc1pWHufFcDEQ6xMgEWWY/tdwCZeSU7EhErTjCbfupg+55A5fpDml0m +3wH4CFcuRjlqyx6Ywixm1ATeitDtJl5HQTw6b8OtEXwSgRmZ0eSqSRVk9QbVS7gu +IpQe09/Zimb5HzjZqZ3fdqHlcW4xax8hyJeyIvF5ZJ57eY8CBvu/wP2GDn26QnvF +xQqdfDbq1H4JmpwUHpbFwBoQK4Q6WFd1z4EA9bRQeo3H9PoqoOwMDjzajwLRF7b7 +q6tYH/n9PyHwdf1c4fFwgSmL1toXGfKlA9hjIaLsRSDD6srT5EdUk78bsnddwI51 +tu7C7P4JG+h1VdRNMNTlqtileWsIE7Nn2A1OkcUxZdF5mamENpDpJcHePLto6c8q +FKiwyFMsxhgsj6HK2HqO+UA4sX5Ni4oHwiPmb//EZLn045M5i1AN26KosJmb8++D +sgR5reWRy+UqJCTYblVg+7Dx++ggUnfxVyQEsWmw5r5f4KU5wXBkvoVMGtPNa9DE +n/uLtObD1qkNL38pRsr2OGRchYCgEoKGqEISBP4knfGXLOlWiW/246j9QzI97r1u +tvy7fKg28G7AUz9l6bpewsPHefBUeRQeieP9eJINaEpxkF/w2RpKDLpQjWxwDDOM +s+D0mrBMJve17AmJ8rMw6dIQPZYNZ88/jz1uQuUwQ2YlbmtZbCG81k9YMFGEU9XS +cyhJxj8hvYnt2PR5Z9/cJPyWOs0m/ufOeeQQ8SnU/lzmrQnpzUd2Z6p5i/B7LdRP +n1kX+l1qynuPnjvBz4nJQE0p6nzW8RyCDSniC9mtYtZmhgC8icqxgbvS7uEOBIYJ +NbK+0bEETTO34iY/JVTIqLOw3iQZYMeUpxpj6Phgx/oooxMTquMecPKNgeVtaBst +qjTNPX0ti1/HYpZqzYi8SV8YjHSJWCVMsZjKPr3W/HIcCKqYoIfgzi83Ha2KMQx6 +-----END RSA PRIVATE KEY----- diff --git a/x-pack/qa/smoke-test-plugins/build.gradle b/x-pack/qa/smoke-test-plugins/build.gradle index 4badc9d3509c8..b66903af18bfb 100644 --- a/x-pack/qa/smoke-test-plugins/build.gradle +++ b/x-pack/qa/smoke-test-plugins/build.gradle @@ -4,7 +4,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - 
testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') } ext.pluginsCount = 0 diff --git a/x-pack/qa/smoke-test-security-with-mustache/build.gradle b/x-pack/qa/smoke-test-security-with-mustache/build.gradle index 1c43db0b63e34..d921c5f5b6605 100644 --- a/x-pack/qa/smoke-test-security-with-mustache/build.gradle +++ b/x-pack/qa/smoke-test-security-with-mustache/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: ':modules:lang-mustache', configuration: 'runtime') } diff --git a/x-pack/qa/smoke-test-watcher-with-security/build.gradle b/x-pack/qa/smoke-test-watcher-with-security/build.gradle index 0f052074bfbf6..a843641be801f 100644 --- a/x-pack/qa/smoke-test-watcher-with-security/build.gradle +++ b/x-pack/qa/smoke-test-watcher-with-security/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') } // bring in watcher rest test suite diff --git a/x-pack/qa/smoke-test-watcher/build.gradle b/x-pack/qa/smoke-test-watcher/build.gradle index abfd27e729b6d..dc87248df617f 100644 --- a/x-pack/qa/smoke-test-watcher/build.gradle +++ b/x-pack/qa/smoke-test-watcher/build.gradle @@ -7,7 +7,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('watcher'), configuration: 'runtime') testCompile project(path: ':modules:lang-mustache', configuration: 'runtime') testCompile project(path: ':modules:lang-painless', configuration: 'runtime') diff --git a/x-pack/qa/sql/security/build.gradle b/x-pack/qa/sql/security/build.gradle index 15f7734f9422e..f02886f80a103 100644 --- a/x-pack/qa/sql/security/build.gradle +++ b/x-pack/qa/sql/security/build.gradle @@ -1,5 +1,5 @@ dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(path: xpackModule('core'), configuration: 'shadow') } Project mainProject = project @@ -20,7 +20,7 @@ subprojects { } dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(path: xpackModule('core'), configuration: 'shadow') } integTestCluster { diff --git a/x-pack/qa/sql/security/ssl/build.gradle b/x-pack/qa/sql/security/ssl/build.gradle index fe8aaeaff2b64..cfc04f97188a4 100644 --- a/x-pack/qa/sql/security/ssl/build.gradle +++ b/x-pack/qa/sql/security/ssl/build.gradle @@ -1,5 +1,5 @@ +import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.LoggedExec -import org.elasticsearch.gradle.MavenFilteringHack import org.elasticsearch.gradle.test.NodeInfo import javax.net.ssl.HttpsURLConnection @@ -22,7 +22,7 @@ Object san = new SanEvaluator() File keystoreDir = new File(project.buildDir, 'keystore') // Generate the node's keystore -File nodeKeystore = new File(keystoreDir, 'test-node.jks') +File nodeKeystore = 
file("$keystoreDir/test-node.jks") task createNodeKeyStore(type: LoggedExec) { doFirst { if (nodeKeystore.parentFile.exists() == false) { @@ -47,7 +47,7 @@ task createNodeKeyStore(type: LoggedExec) { } // Generate the client's keystore -File clientKeyStore = new File(keystoreDir, 'test-client.jks') +File clientKeyStore = file("$keystoreDir/test-client.jks") task createClientKeyStore(type: LoggedExec) { doFirst { if (clientKeyStore.parentFile.exists() == false) { @@ -72,7 +72,7 @@ task createClientKeyStore(type: LoggedExec) { } // Export the node's certificate -File nodeCertificate = new File(keystoreDir, 'test-node.cert') +File nodeCertificate = file("$keystoreDir/test-node.cert") task exportNodeCertificate(type: LoggedExec) { dependsOn createNodeKeyStore doFirst { @@ -104,7 +104,7 @@ task importNodeCertificateInClientKeyStore(type: LoggedExec) { } // Export the client's certificate -File clientCertificate = new File(keystoreDir, 'test-client.cert') +File clientCertificate = file("$keystoreDir/test-client.cert") task exportClientCertificate(type: LoggedExec) { dependsOn createClientKeyStore doFirst { @@ -145,7 +145,6 @@ processTestResources.dependsOn(importNodeCertificateInClientKeyStore, importClie integTestCluster.dependsOn(importClientCertificateInNodeKeyStore) - integTestCluster { // The setup that we actually want setting 'xpack.security.http.ssl.enabled', 'true' @@ -206,9 +205,18 @@ integTestCluster { return tmpFile.exists() } } +Closure notRunningFips = { + Boolean.parseBoolean(BuildPlugin.runJavascript(project, project.runtimeJavaHome, + 'print(java.security.Security.getProviders()[0].name.toLowerCase().contains("fips"));')) == false +} - - +// Do not attempt to form a cluster in a FIPS JVM, as doing so with a JKS keystore will fail. +// TODO Revisit this when SQL CLI client can handle key/certificate instead of only Keystores. +// https://github.com/elastic/elasticsearch/issues/32306 +tasks.matching({ it.name == "integTestCluster#init" }).all { onlyIf notRunningFips } +tasks.matching({ it.name == "integTestCluster#start" }).all { onlyIf notRunningFips } +tasks.matching({ it.name == "integTestCluster#wait" }).all { onlyIf notRunningFips } +tasks.matching({ it.name == "integTestRunner" }).all { onlyIf notRunningFips } /** A lazy evaluator to find the san to use for certificate generation. 
 /** A lazy evaluator to find the san to use for certificate generation. */
 class SanEvaluator {
@@ -293,7 +301,7 @@ class SanEvaluator {
         byte[] bytes = inet6Address.getAddress();
         int[] hextets = new int[8];
         for (int i = 0; i < hextets.length; i++) {
-            hextets[i] = (bytes[2 * i] & 255) << 8 | bytes[2 * i + 1] & 255;
+            hextets[i] = (bytes[2 * i] & 255) << 8 | bytes[2 * i + 1] & 255;
         }
         compressLongestRunOfZeroes(hextets);
         return hextetsToIPv6String(hextets);
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java
index a339222445a1a..301e15c8efbd5 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java
@@ -84,7 +84,7 @@ protected Connection useDataSource() throws SQLException {
     public static void index(String index, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
         index(index, "1", body);
     }
-    
+
     public static void index(String index, String documentId, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
         Request request = new Request("PUT", "/" + index + "/doc/" + documentId);
         request.addParameter("refresh", "true");
diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java
index ec4e8824a19c3..7861557709ed1 100644
--- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java
+++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java
@@ -7,6 +7,7 @@
 import com.unboundid.ldap.sdk.LDAPException;
 import com.unboundid.ldap.sdk.ResultCode;
+
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.common.settings.Settings;
@@ -302,7 +303,6 @@ public void testStandardLdapConnection() throws Exception {
         }
     }
 
-    @SuppressWarnings("unchecked")
     @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29840")
     public void testHandlingLdapReferralErrors() throws Exception {
         String groupSearchBase = "DC=ad,DC=test,DC=elasticsearch,DC=com";
diff --git a/x-pack/qa/third-party/hipchat/build.gradle b/x-pack/qa/third-party/hipchat/build.gradle
index cd37d6e738e64..03b6c31969844 100644
--- a/x-pack/qa/third-party/hipchat/build.gradle
+++ b/x-pack/qa/third-party/hipchat/build.gradle
@@ -4,7 +4,7 @@ apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'
 
 dependencies {
-  testCompile project(path: xpackModule('core'), configuration: 'runtime')
+  testCompile project(path: xpackModule('core'), configuration: 'shadow')
   testCompile project(path: xpackModule('watcher'), configuration: 'runtime')
 }
 
diff --git a/x-pack/qa/third-party/jira/build.gradle b/x-pack/qa/third-party/jira/build.gradle
index 078fed4dd36e2..3814c8e9a5382 100644
--- a/x-pack/qa/third-party/jira/build.gradle
+++ b/x-pack/qa/third-party/jira/build.gradle
@@ -7,7 +7,7 @@ apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'
 
 dependencies {
-  testCompile project(path: xpackModule('core'), configuration: 'runtime')
+  testCompile project(path: xpackModule('core'), configuration: 'shadow')
testCompile project(path: xpackModule('watcher'), configuration: 'runtime') } diff --git a/x-pack/qa/third-party/pagerduty/build.gradle b/x-pack/qa/third-party/pagerduty/build.gradle index 683e18caa1c7e..c0f337e160e0a 100644 --- a/x-pack/qa/third-party/pagerduty/build.gradle +++ b/x-pack/qa/third-party/pagerduty/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('watcher'), configuration: 'runtime') } diff --git a/x-pack/qa/third-party/slack/build.gradle b/x-pack/qa/third-party/slack/build.gradle index abcdad0e096e1..431752765f3a0 100644 --- a/x-pack/qa/third-party/slack/build.gradle +++ b/x-pack/qa/third-party/slack/build.gradle @@ -5,7 +5,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('watcher'), configuration: 'runtime') } diff --git a/x-pack/qa/transport-client-tests/build.gradle b/x-pack/qa/transport-client-tests/build.gradle index c864a9084cba8..a94ad8fd59267 100644 --- a/x-pack/qa/transport-client-tests/build.gradle +++ b/x-pack/qa/transport-client-tests/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } diff --git a/x-pack/test/feature-aware/build.gradle b/x-pack/test/feature-aware/build.gradle index 11b0e67183c8f..f6a1f6cb16f2a 100644 --- a/x-pack/test/feature-aware/build.gradle +++ b/x-pack/test/feature-aware/build.gradle @@ -3,7 +3,7 @@ apply plugin: 'elasticsearch.build' dependencies { compile 'org.ow2.asm:asm:6.2' compile "org.elasticsearch:elasticsearch:${version}" - compile "org.elasticsearch.plugin:x-pack-core:${version}" + compile project(path: xpackModule('core'), configuration: 'shadow') testCompile "org.elasticsearch.test:framework:${version}" } diff --git a/x-pack/transport-client/build.gradle b/x-pack/transport-client/build.gradle index 31c05569274de..7155dad5ee60d 100644 --- a/x-pack/transport-client/build.gradle +++ b/x-pack/transport-client/build.gradle @@ -9,8 +9,8 @@ archivesBaseName = 'x-pack-transport' dependencies { // this "api" dependency looks weird, but it is correct, as it contains - // all of x-pack for now, and transport client will be going away in the future. - compile "org.elasticsearch.plugin:x-pack-core:${version}" + // all of x-pack for now, and transport client will be going away in the future. + compile project(path: xpackModule('core'), configuration: 'shadow') compile "org.elasticsearch.client:transport:${version}" testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "junit:junit:${versions.junit}"