diff --git a/benchmarks/src/main/java/org/elasticsearch/common/RoundingBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/common/RoundingBenchmark.java deleted file mode 100644 index 0dac070377dd4..0000000000000 --- a/benchmarks/src/main/java/org/elasticsearch/common/RoundingBenchmark.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.common; - -import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; -import org.openjdk.jmh.annotations.Benchmark; -import org.openjdk.jmh.annotations.BenchmarkMode; -import org.openjdk.jmh.annotations.Fork; -import org.openjdk.jmh.annotations.Measurement; -import org.openjdk.jmh.annotations.Mode; -import org.openjdk.jmh.annotations.OutputTimeUnit; -import org.openjdk.jmh.annotations.Param; -import org.openjdk.jmh.annotations.Scope; -import org.openjdk.jmh.annotations.Setup; -import org.openjdk.jmh.annotations.State; -import org.openjdk.jmh.annotations.Warmup; -import org.openjdk.jmh.infra.Blackhole; - -import java.time.ZoneId; -import java.util.concurrent.TimeUnit; -import java.util.function.Supplier; - -@Fork(2) -@Warmup(iterations = 10) -@Measurement(iterations = 5) -@BenchmarkMode(Mode.AverageTime) -@OutputTimeUnit(TimeUnit.NANOSECONDS) -@State(Scope.Benchmark) -public class RoundingBenchmark { - private static final DateFormatter FORMATTER = DateFormatter.forPattern("date_optional_time"); - - @Param( - { - "2000-01-01 to 2020-01-01", // A super long range - "2000-10-01 to 2000-11-01", // A whole month which is pretty 
believable - "2000-10-29 to 2000-10-30", // A date right around daylight savings time. - "2000-06-01 to 2000-06-02" // A date fully in one time zone. Should be much faster than above. - } - ) - public String range; - - @Param({ "java time", "es" }) - public String rounder; - - @Param({ "UTC", "America/New_York" }) - public String zone; - - @Param({ "calendar year", "calendar hour", "10d", "5d", "1h" }) - public String interval; - - @Param({ "1", "10000", "1000000", "100000000" }) - public int count; - - private long min; - private long max; - private long[] dates; - private Supplier rounderBuilder; - - @Setup - public void buildDates() { - String[] r = range.split(" to "); - min = FORMATTER.parseMillis(r[0]); - max = FORMATTER.parseMillis(r[1]); - dates = new long[count]; - long date = min; - long diff = (max - min) / dates.length; - for (int i = 0; i < dates.length; i++) { - if (date >= max) { - throw new IllegalStateException("made a bad date [" + date + "]"); - } - dates[i] = date; - date += diff; - } - Rounding.Builder roundingBuilder; - if (interval.startsWith("calendar ")) { - roundingBuilder = Rounding.builder( - DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.substring("calendar ".length())) - ); - } else { - roundingBuilder = Rounding.builder(TimeValue.parseTimeValue(interval, "interval")); - } - Rounding rounding = roundingBuilder.timeZone(ZoneId.of(zone)).build(); - switch (rounder) { - case "java time": - rounderBuilder = rounding::prepareJavaTime; - break; - case "es": - rounderBuilder = () -> rounding.prepare(min, max); - break; - default: - throw new IllegalArgumentException("Expectd rounder to be [java time] or [es]"); - } - } - - @Benchmark - public void round(Blackhole bh) { - Rounding.Prepared rounder = rounderBuilder.get(); - for (int i = 0; i < dates.length; i++) { - bh.consume(rounder.round(dates[i])); - } - } - - @Benchmark - public void nextRoundingValue(Blackhole bh) { - Rounding.Prepared rounder = rounderBuilder.get(); - for 
(int i = 0; i < dates.length; i++) { - bh.consume(rounder.nextRoundingValue(dates[i])); - } - } -} diff --git a/build-tools-internal/src/main/groovy/elasticsearch.formatting.gradle b/build-tools-internal/src/main/groovy/elasticsearch.formatting.gradle index 205e5e3229394..7147f5e09610b 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.formatting.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.formatting.gradle @@ -196,7 +196,8 @@ def projectPathsToExclude = [ subprojects { plugins.withType(ElasticsearchJavaPlugin).whenPluginAdded { - if (projectPathsToExclude.contains(project.path) == false) { + if (projectPathsToExclude.contains(project.path) == false || + providers.systemProperty("es.format.everything").forUseAtConfigurationTime().isPresent()) { project.apply plugin: "com.diffplug.spotless" diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java index e06253a3d9591..3449b4d9e40ee 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java @@ -29,10 +29,17 @@ import org.gradle.workers.WorkParameters; import org.gradle.workers.WorkerExecutor; -import javax.inject.Inject; +import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.nio.file.Files; import java.util.Arrays; +import java.util.List; +import javax.inject.Inject; +/** + * This task wraps up the details of building a Docker image, including adding a pull + * mechanism that can retry, and emitting the image SHA as a task output. 
+ */ public class DockerBuildTask extends DefaultTask { private static final Logger LOGGER = Logging.getLogger(DockerBuildTask.class); @@ -167,6 +174,8 @@ public void execute() { parameters.getBaseImages().get().forEach(this::pullBaseImage); } + final List tags = parameters.getTags().get(); + LoggedExec.exec(execOperations, spec -> { spec.executable("docker"); @@ -176,17 +185,32 @@ public void execute() { spec.args("--no-cache"); } - parameters.getTags().get().forEach(tag -> spec.args("--tag", tag)); + tags.forEach(tag -> spec.args("--tag", tag)); parameters.getBuildArgs().get().forEach((k, v) -> spec.args("--build-arg", k + "=" + v)); }); + // Fetch the Docker image's hash, and write it to desk as the task's output. Doing this allows us + // to do proper up-to-date checks in Gradle. try { - parameters.getMarkerFile().getAsFile().get().createNewFile(); + final String checksum = getImageChecksum(tags.get(0)); + Files.writeString(parameters.getMarkerFile().getAsFile().get().toPath(), checksum + "\n"); } catch (IOException e) { - throw new RuntimeException("Failed to create marker file", e); + throw new RuntimeException("Failed to write marker file", e); } } + + private String getImageChecksum(String imageTag) { + final ByteArrayOutputStream stdout = new ByteArrayOutputStream(); + + execOperations.exec(spec -> { + spec.setCommandLine("docker", "inspect", "--format", "{{ .Id }}", imageTag); + spec.setStandardOutput(stdout); + spec.setIgnoreExitValue(false); + }); + + return stdout.toString().trim(); + } } interface Parameters extends WorkParameters { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/GeoIpStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/GeoIpStatsResponse.java index 625c3aee775cf..3721e002836c9 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/GeoIpStatsResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/GeoIpStatsResponse.java @@ -156,7 
+156,7 @@ public Map getDatabases() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field("files_in_temp", filesInTemp); + builder.stringListField("files_in_temp", filesInTemp); builder.field("databases", databases.entrySet().stream() .sorted(Map.Entry.comparingByKey()) .map(Map.Entry::getValue) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/PutAutoFollowPatternRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/PutAutoFollowPatternRequest.java index b11b20db6963b..4c95a804904bb 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/PutAutoFollowPatternRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/PutAutoFollowPatternRequest.java @@ -74,9 +74,9 @@ public void setFollowIndexNamePattern(String followIndexNamePattern) { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(PutFollowRequest.REMOTE_CLUSTER_FIELD.getPreferredName(), remoteCluster); - builder.field(LEADER_PATTERNS_FIELD.getPreferredName(), leaderIndexPatterns); + builder.stringListField(LEADER_PATTERNS_FIELD.getPreferredName(), leaderIndexPatterns); if (leaderIndexExclusionPatterns.isEmpty() == false) { - builder.field(LEADER_EXCLUSION_PATTERNS_FIELD.getPreferredName(), leaderIndexExclusionPatterns); + builder.stringListField(LEADER_EXCLUSION_PATTERNS_FIELD.getPreferredName(), leaderIndexExclusionPatterns); } if (followIndexNamePattern != null) { builder.field(FOLLOW_PATTERN_FIELD.getPreferredName(), followIndexNamePattern); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/enrich/PutPolicyRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/enrich/PutPolicyRequest.java index 19e816b5bed75..d98d126ada428 100644 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/enrich/PutPolicyRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/enrich/PutPolicyRequest.java @@ -96,12 +96,12 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws { builder.startObject(type); { - builder.field(NamedPolicy.INDICES_FIELD.getPreferredName(), indices); + builder.stringListField(NamedPolicy.INDICES_FIELD.getPreferredName(), indices); if (query != null) { builder.field(NamedPolicy.QUERY_FIELD.getPreferredName(), asMap(query, XContentType.JSON)); } builder.field(NamedPolicy.MATCH_FIELD_FIELD.getPreferredName(), matchField); - builder.field(NamedPolicy.ENRICH_FIELDS_FIELD.getPreferredName(), enrichFields); + builder.stringListField(NamedPolicy.ENRICH_FIELDS_FIELD.getPreferredName(), enrichFields); } builder.endObject(); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutIndexTemplateRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutIndexTemplateRequest.java index 1fabdb6229f1a..b2eea60b4471d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutIndexTemplateRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutIndexTemplateRequest.java @@ -401,7 +401,7 @@ public final PutIndexTemplateRequest masterNodeTimeout(String timeout) { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("index_patterns", indexPatterns); + builder.stringListField("index_patterns", indexPatterns); builder.field("order", order); if (version != null) { builder.field("version", version); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java index e1be003e87979..003bd58b0231b 100644 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java @@ -118,7 +118,7 @@ public Optional validate() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.array(INDEX.getPreferredName(), indices.toArray()); + builder.stringListField(INDEX.getPreferredName(), indices); if (queryConfig != null) { builder.field(QUERY.getPreferredName(), queryConfig.getQuery()); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java index 1028ab00b3abd..8b8de4a1eb903 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java @@ -133,7 +133,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); if (datafeedIds.isEmpty() == false) { - builder.field(DATAFEED_IDS.getPreferredName(), datafeedIds); + builder.stringListField(DATAFEED_IDS.getPreferredName(), datafeedIds); } if (allowNoMatch != null) { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java index 87a2d6cdd8da1..5ff9a9c608e4d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java @@ -133,7 +133,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); if (jobIds.isEmpty() == false) { - builder.field(JOB_IDS.getPreferredName(), jobIds); + 
builder.stringListField(JOB_IDS.getPreferredName(), jobIds); } if (allowNoMatch != null) { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java index ed07b27f4fb90..0aadf67cff615 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java @@ -102,10 +102,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(MlFilter.DESCRIPTION.getPreferredName(), description); } if (addItems != null) { - builder.field(ADD_ITEMS.getPreferredName(), addItems); + builder.stringListField(ADD_ITEMS.getPreferredName(), addItems); } if (removeItems != null) { - builder.field(REMOVE_ITEMS.getPreferredName(), removeItems); + builder.stringListField(REMOVE_ITEMS.getPreferredName(), removeItems); } builder.endObject(); return builder; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java index 487fc825e610c..8d79d4686e3d4 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java @@ -75,7 +75,7 @@ public String getDescription() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(ID.getPreferredName(), id); - builder.field(JOB_IDS.getPreferredName(), jobIds); + builder.stringListField(JOB_IDS.getPreferredName(), jobIds); if (description != null) { builder.field(DESCRIPTION.getPreferredName(), description); } diff --git 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/job/config/DateHistogramGroupConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/job/config/DateHistogramGroupConfig.java index cbd1ab078619a..54383e7bfdabf 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/job/config/DateHistogramGroupConfig.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/job/config/DateHistogramGroupConfig.java @@ -9,17 +9,17 @@ import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ValidationException; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.common.xcontent.ParseField; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; import java.util.Collections; import java.util.HashSet; import java.util.Objects; @@ -179,7 +179,7 @@ public DateHistogramGroupConfig(final String field, final DateHistogramInterval * The {@code field} and {@code interval} are required to compute the date histogram for the rolled up documents. * The {@code delay} is optional and can be set to {@code null}. It defines how long to wait before rolling up new documents. * The {@code timeZone} is optional and can be set to {@code null}. When configured, the time zone value is resolved using - * ({@link DateTimeZone#forID(String)} and must match a time zone identifier provided by the Joda Time library. 
+ * {@link ZoneId#of(String)} and must match a time zone identifier provided by the Java Time library. *

* @param field the name of the date field to use for the date histogram (required) * @param interval the interval to use for the date histogram (required) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/QueuedWatch.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/QueuedWatch.java index d5a1f1c9be65f..3c61a38c758dd 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/QueuedWatch.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/QueuedWatch.java @@ -8,10 +8,10 @@ package org.elasticsearch.client.watcher; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.joda.time.DateTime; +import org.elasticsearch.common.xcontent.ParseField; +import java.time.ZonedDateTime; import java.util.Objects; public class QueuedWatch { @@ -21,8 +21,8 @@ public class QueuedWatch { new ConstructingObjectParser<>("watcher_stats_node", true, (args, c) -> new QueuedWatch( (String) args[0], (String) args[1], - DateTime.parse((String) args[2]), - DateTime.parse((String) args[3]) + ZonedDateTime.parse((String) args[2]), + ZonedDateTime.parse((String) args[3]) )); static { @@ -35,10 +35,10 @@ public class QueuedWatch { private final String watchId; private final String watchRecordId; - private final DateTime triggeredTime; - private final DateTime executionTime; + private final ZonedDateTime triggeredTime; + private final ZonedDateTime executionTime; - public QueuedWatch(String watchId, String watchRecordId, DateTime triggeredTime, DateTime executionTime) { + public QueuedWatch(String watchId, String watchRecordId, ZonedDateTime triggeredTime, ZonedDateTime executionTime) { this.watchId = watchId; this.watchRecordId = watchRecordId; this.triggeredTime = triggeredTime; @@ -53,11 +53,11 @@ public String getWatchRecordId() { return watchRecordId; } - public DateTime getTriggeredTime() { + public 
ZonedDateTime getTriggeredTime() { return triggeredTime; } - public DateTime getExecutionTime() { + public ZonedDateTime getExecutionTime() { return executionTime; } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/WatchExecutionSnapshot.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/WatchExecutionSnapshot.java index 16afc35d8782c..c212347a3504d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/WatchExecutionSnapshot.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/WatchExecutionSnapshot.java @@ -8,10 +8,10 @@ package org.elasticsearch.client.watcher; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.joda.time.DateTime; +import org.elasticsearch.common.xcontent.ParseField; +import java.time.ZonedDateTime; import java.util.Arrays; import java.util.List; import java.util.Locale; @@ -23,8 +23,8 @@ public class WatchExecutionSnapshot { new ConstructingObjectParser<>("watcher_stats_node", true, (args, c) -> new WatchExecutionSnapshot( (String) args[0], (String) args[1], - DateTime.parse((String) args[2]), - DateTime.parse((String) args[3]), + ZonedDateTime.parse((String) args[2]), + ZonedDateTime.parse((String) args[3]), ExecutionPhase.valueOf(((String) args[4]).toUpperCase(Locale.ROOT)), args[5] == null ? null : ((List) args[5]).toArray(new String[0]), args[6] == null ? 
null : ((List) args[6]).toArray(new String[0]) @@ -42,13 +42,13 @@ public class WatchExecutionSnapshot { private final String watchId; private final String watchRecordId; - private final DateTime triggeredTime; - private final DateTime executionTime; + private final ZonedDateTime triggeredTime; + private final ZonedDateTime executionTime; private final ExecutionPhase phase; private final String[] executedActions; private final String[] executionStackTrace; - public WatchExecutionSnapshot(String watchId, String watchRecordId, DateTime triggeredTime, DateTime executionTime, + public WatchExecutionSnapshot(String watchId, String watchRecordId, ZonedDateTime triggeredTime, ZonedDateTime executionTime, ExecutionPhase phase, String[] executedActions, String[] executionStackTrace) { this.watchId = watchId; this.watchRecordId = watchRecordId; @@ -67,11 +67,11 @@ public String getWatchRecordId() { return watchRecordId; } - public DateTime getTriggeredTime() { + public ZonedDateTime getTriggeredTime() { return triggeredTime; } - public DateTime getExecutionTime() { + public ZonedDateTime getExecutionTime() { return executionTime; } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java index 27e04881d5896..3da1e41de2ea5 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java @@ -48,15 +48,17 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; import java.io.IOException; +import java.time.LocalTime; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.ArrayList; import 
java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; @@ -923,7 +925,8 @@ private void validateBulkResponses(int nbItems, boolean[] errors, BulkResponse b public void testUrlEncode() throws IOException { String indexPattern = ""; String expectedIndex = "logstash-" + - DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(DateTimeZone.UTC).monthOfYear().roundFloorCopy()); + DateTimeFormatter.ofPattern("uuuu.MM.dd", Locale.ROOT) + .format(ZonedDateTime.now(ZoneOffset.UTC).withDayOfMonth(1).with(LocalTime.MIN)); { IndexRequest indexRequest = new IndexRequest(indexPattern).id("id#1"); indexRequest.source("field", "value"); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/TransformIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/TransformIT.java index 689a9ddbc108c..e06fdb2f56f76 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/TransformIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/TransformIT.java @@ -53,10 +53,10 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortOrder; -import org.joda.time.Instant; import org.junit.After; import java.io.IOException; +import java.time.Instant; import java.util.ArrayList; import java.util.HashMap; import java.util.List; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/process/DataCountsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/process/DataCountsTests.java index 8e78ec3ed6aa3..baaafe7bd53ff 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/process/DataCountsTests.java +++ 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/process/DataCountsTests.java @@ -9,20 +9,24 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractXContentTestCase; -import org.joda.time.DateTime; import java.time.Instant; +import java.time.ZonedDateTime; +import java.util.Date; public class DataCountsTests extends AbstractXContentTestCase { + private static Date randomDate() { + return Date.from(ZonedDateTime.now(randomZone()).toInstant()); + } + public static DataCounts createTestInstance(String jobId) { return new DataCounts(jobId, randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), - new DateTime(randomDateTimeZone()).toDate(), new DateTime(randomDateTimeZone()).toDate(), - new DateTime(randomDateTimeZone()).toDate(), new DateTime(randomDateTimeZone()).toDate(), - new DateTime(randomDateTimeZone()).toDate(), randomBoolean() ? null : Instant.now()); + randomDate(), randomDate(), randomDate(), randomDate(), randomDate(), + randomBoolean() ? 
null : Instant.now()); } @Override diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/WatcherStatsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/WatcherStatsResponseTests.java index f29ba51113273..1771e29162980 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/WatcherStatsResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/WatcherStatsResponseTests.java @@ -12,10 +12,11 @@ import org.elasticsearch.client.NodesResponseHeaderTestUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -142,7 +143,8 @@ protected WatcherStatsResponse createTestInstance() { } } snapshots.add(new WatchExecutionSnapshot(randomAlphaOfLength(10), randomAlphaOfLength(10), - new DateTime(randomInt(), DateTimeZone.UTC), new DateTime(randomInt(), DateTimeZone.UTC), + ZonedDateTime.ofInstant(Instant.ofEpochMilli(randomInt()), ZoneOffset.UTC), + ZonedDateTime.ofInstant(Instant.ofEpochMilli(randomInt()), ZoneOffset.UTC), randomFrom(ExecutionPhase.values()), actions, stackTrace)); } } @@ -153,7 +155,8 @@ protected WatcherStatsResponse createTestInstance() { queuedWatches = new ArrayList<>(queuedWatchCount); for (int j=0; j def (major,minor) = VersionProperties.elasticsearch.split("\\.") + // We tag our Docker images with various pieces of information, including a timestamp + // for when the image was built. However, this makes it impossible completely cache + // the image. When developing the Docker images, it's very tedious to completely rebuild + // an image for every single change. 
Therefore, outside of CI, we fix the + // build time to midnight so that the Docker build cache is usable. + def buildDate = BuildParams.isCi() ? BuildParams.buildDate : BuildParams.buildDate.truncatedTo(ChronoUnit.DAYS) + return [ 'base_image' : base.image, 'bin_dir' : base == DockerBase.IRON_BANK ? 'scripts' : 'bin', - 'build_date' : BuildParams.buildDate, + 'build_date' : buildDate, 'config_dir' : base == DockerBase.IRON_BANK ? 'scripts' : 'config', 'git_revision' : BuildParams.gitRevision, 'license' : base == DockerBase.IRON_BANK ? 'Elastic License 1.0' : 'Elastic-License-2.0', @@ -341,6 +349,7 @@ void addBuildDockerImageTask(Architecture architecture, DockerBase base) { dockerContext.fileProvider(transformTask.map { Sync task -> task.getDestinationDir() }) + noCache = BuildParams.isCi tags = generateTags(base) if (base == DockerBase.IRON_BANK) { @@ -402,11 +411,11 @@ void addBuildEssDockerImageTask(Architecture architecture) { tasks.register(taskName("build", architecture, base, "DockerImage"), DockerBuildTask) { TaskProvider buildCloudTask = tasks.named(taskName("build", architecture, DockerBase.CLOUD, "DockerImage")) - dependsOn(buildCloudTask) - dependsOn(buildContextTask) + inputs.files(buildCloudTask) dockerContext.fileProvider(buildContextTask.map { it.getDestinationDir() }) + noCache = BuildParams.isCi baseImages = [] tags = generateTags(base) diff --git a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc index 2f052dcf76a3f..8f4f65ce3d216 100644 --- a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc @@ -4,6 +4,9 @@ Composite ++++ +WARNING: The composite aggregation is expensive. Load test your application +before deploying a composite aggregation in production. + A multi-bucket aggregation that creates composite buckets from different sources. 
Unlike the other `multi-bucket` aggregations, you can use the `composite` diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc index b92f3403d8aa5..a2ba22578275f 100644 --- a/docs/reference/cluster/nodes-stats.asciidoc +++ b/docs/reference/cluster/nodes-stats.asciidoc @@ -960,11 +960,11 @@ Time in milliseconds recovery operations were delayed due to throttling. ======= -`shards`:: +`shards_stats`:: (object) Contains statistics about all shards assigned to the node. + -.Properties of `shards` +.Properties of `shard_stats` [%collapsible%open] ======= `total_count`:: diff --git a/docs/reference/migration/migrate_8_0/packaging.asciidoc b/docs/reference/migration/migrate_8_0/packaging.asciidoc index 2e5ef1146cf3b..554353215d098 100644 --- a/docs/reference/migration/migrate_8_0/packaging.asciidoc +++ b/docs/reference/migration/migrate_8_0/packaging.asciidoc @@ -16,6 +16,13 @@ line tools. *Impact* + Use Java 11 or higher. Attempts to run {es} 8.0 using earlier Java versions will fail. + +Note that there is not yet a FIPS-certified security module for Java 17 +that you can use when running Elasticsearch 8.0 in FIPS 140-2 mode. +If you run in FIPS 140-2 mode, you will either need to request an exception +from your security organization to upgrade to Elasticsearch 8.0, +or remain on Elasticsearch 7.x until Java 17 is certified. + ==== .JAVA_HOME is no longer supported. diff --git a/docs/reference/modules/indices/index_management.asciidoc b/docs/reference/modules/indices/index_management.asciidoc index cda08db2b9eb1..8ff7778b21ef2 100644 --- a/docs/reference/modules/indices/index_management.asciidoc +++ b/docs/reference/modules/indices/index_management.asciidoc @@ -34,7 +34,7 @@ Specifies the hosts that can be <>. 
// end::reindex-remote-whitelist[] [[stack-templates-enabled]] -`stack.templates.enabled` {ess-icon}:: +`stack.templates.enabled`:: + -- (<>) diff --git a/docs/reference/snapshot-restore/restore-snapshot.asciidoc b/docs/reference/snapshot-restore/restore-snapshot.asciidoc index 26744fdbaeccb..eda412f0084e7 100644 --- a/docs/reference/snapshot-restore/restore-snapshot.asciidoc +++ b/docs/reference/snapshot-restore/restore-snapshot.asciidoc @@ -100,9 +100,10 @@ operation is complete. WARNING: If the <> cluster setting is `false`, don't use the <> to -target the `*` or `.*` wildcard expression. If you use {es}'s security features, -this will delete system indices required for authentication. To delete all -regular indices, use `*,-.*` instead. +target the `*` or `.*` wildcard pattern. If you use {es}'s security features, +this will delete system indices required for authentication. Instead, target the +`*,-.*` wildcard pattern to exclude these system indices and other index names +that begin with a dot (`.`). [source,console] ---- @@ -118,9 +119,10 @@ By default, a restore request attempts to restore all indices and data streams in the snapshot, including system indices. If your cluster already contains one or more of these system indices, the request will return an error. -To avoid this error, specify the indices and data streams to restore. To -exclude system indices, append the `-.*` wildcard pattern. To restore all -indices and data streams except system indices, use `*,-.*`. +To avoid this error, specify the indices and data streams to restore. To exclude +system indices and other index names that begin with a dot (`.`), append the +`-.*` wildcard pattern. To restore all indices and data streams except dot +indices, use `*,-.*`. [source,console] ---- @@ -323,8 +325,7 @@ PUT _cluster/settings ---- // TEST[setup:setup-snapshots] -. Delete all existing indices and data streams on the cluster, including all -system indices. +. 
Delete existing indices and data streams on the cluster. + [source,console] ---- diff --git a/docs/reference/upgrade.asciidoc b/docs/reference/upgrade.asciidoc index ba760707202aa..82d8dd43bd0d6 100644 --- a/docs/reference/upgrade.asciidoc +++ b/docs/reference/upgrade.asciidoc @@ -3,6 +3,16 @@ [partintro] -- +ifeval::["{release-state}"!="released"] +[[upgrade-pre-release]] +IMPORTANT: This documentation is for a pre-release of {es} {minor-version}. +Upgrades from pre-release builds are not supported and +could result in errors or data loss. +If you upgrade from a released version to a pre-release verion for testing, +discard the contents of the cluster when you are done. +Do not attempt to upgrade to the final release. +endif::[] + {es} can usually be upgraded using a <> process so upgrading does not interrupt service. Rolling upgrades are supported: @@ -16,7 +26,7 @@ endif::[] [TIP] ==== -For rolling upgrades between major versions (e.g., 5.6 to 6.8), we recommend +For rolling upgrades between major versions, we recommend using the {kibana-ref}/upgrade-assistant.html[Kibana Upgrade Assistant]. The upgrade assistant identifies deprecated settings in your cluster and guides @@ -26,7 +36,9 @@ We also recommend checking your <> for any other functionality that may have changed. ==== -The following table shows the recommended upgrade paths to {version}. +[discrete] +[[upgrade-paths]] +=== Upgrade paths to {version} [cols="<1,3",options="header",] |==== @@ -69,12 +81,18 @@ To upgrade directly to {version} from 6.7 or earlier, you must shut down the cluster, install {version}, and restart. For more information, see <>. -[WARNING] -==== +[discrete] +[[upgrade-downgrade]] +=== Downgrades + In-place downgrades to earlier versions are *not* supported. To downgrade to an earlier version, <> taken prior to the version upgrade. -==== + + +[discrete] +[[upgrade-index-compatibility]] +=== Index compatibility {es} can read indices created in the previous major version. 
If you have indices created in 5.x or before, you must reindex or delete them @@ -87,17 +105,11 @@ When upgrading to a new version of {es}, you need to upgrade each of the products in your Elastic Stack. For more information, see the {stack-ref}/upgrading-elastic-stack.html[Elastic Stack Installation and Upgrade Guide]. -ifeval::["{release-state}"!="released"] -[[upgrade-pre-release]] -NOTE: This documentation is for {es} version {version}, which is not yet -released. You may run a pre-release build of {es} for testing, and you may -upgrade from an earlier released version to a pre-release build of {es} -{version} if permitted by the compatibility table above, but upgrading from a -pre-release build to another build (whether released or not) is unsupported. -Upgrading a pre-release build may result in errors or may appear to succeed -having silently lost some data. You should discard the contents of a cluster -running a pre-release build before using a different build. -endif::[] +[discrete] +[[upgrade-fips-java17]] +=== FIPS Compliance and Java 17 + +include::{xes-repo-dir}/security/fips-java17.asciidoc[] -- diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java index 4c464a1de2cd3..a9c62a646d3be 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java @@ -25,8 +25,10 @@ import java.util.Arrays; import java.util.Base64; import java.util.Calendar; +import java.util.Collection; import java.util.Collections; import java.util.Date; +import java.util.EnumSet; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.IdentityHashMap; @@ -921,6 +923,58 @@ private XContentBuilder value(ToXContent value, ToXContent.Params params) throws // Maps & Iterable ////////////////////////////////// + public 
XContentBuilder stringListField(String name, Collection values) throws IOException { + field(name); + if (values == null) { + return nullValue(); + } + startArray(); + for (String value : values) { + value(value); + } + endArray(); + return this; + } + + public XContentBuilder xContentList(String name, Collection values) throws IOException { + field(name); + if (values == null) { + return nullValue(); + } + startArray(); + for (ToXContent value : values) { + value(value); + } + endArray(); + return this; + } + + public XContentBuilder xContentList(String name, ToXContent... values) throws IOException { + field(name); + if (values == null) { + return nullValue(); + } + startArray(); + for (ToXContent value : values) { + value(value); + } + endArray(); + return this; + } + + public XContentBuilder enumSet(String name, EnumSet values) throws IOException { + field(name); + if (values == null) { + return nullValue(); + } + startArray(); + for (Enum value : values) { + value(value); + } + endArray(); + return this; + } + public XContentBuilder field(String name, Map values) throws IOException { return field(name).map(values); } @@ -929,6 +983,32 @@ public XContentBuilder map(Map values) throws IOException { return map(values, true, true); } + public XContentBuilder stringStringMap(String name, Map values) throws IOException { + field(name); + if (values == null) { + return nullValue(); + } + startObject(); + for (Map.Entry value : values.entrySet()) { + field(value.getKey()); + value(value.getValue()); + } + return endObject(); + } + + public XContentBuilder xContentValuesMap(String name, Map values) throws IOException { + field(name); + if (values == null) { + return nullValue(); + } + startObject(); + for (Map.Entry value : values.entrySet()) { + field(value.getKey()); + value(value.getValue()); + } + return endObject(); + } + /** writes a map without the start object and end object headers */ public XContentBuilder mapContents(Map values) throws IOException { return 
map(values, true, false); @@ -1026,6 +1106,11 @@ public XContentBuilder percentageField(String rawFieldName, String readableField return this; } + public XContentBuilder field(String name, Enum value) throws IOException { + field(name); + return value(value == null ? null : value.toString()); + } + //////////////////////////////////////////////////////////////////////////// // Raw fields ////////////////////////////////// diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java index 35482bbe93fa0..6c43acccd7036 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java @@ -547,7 +547,11 @@ public synchronized int read() throws IOException { // reclaim them (see MonoSendMany). Additionally, that very same operator requests // 128 elements (that's hardcoded) once it's subscribed (later on, it requests // by 64 elements), that's why we provide 64kb buffers. - return Flux.range(0, (int) Math.ceil((double) length / (double) chunkSize)) + + // length is at most 100MB so it's safe to cast back to an integer in this case + final int parts = (int) length / chunkSize; + final long remaining = length % chunkSize; + return Flux.range(0, remaining == 0 ? parts : parts + 1) .map(i -> i * chunkSize) .concatMap(pos -> Mono.fromCallable(() -> { long count = pos + chunkSize > length ? 
length - pos : chunkSize; diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java index 0909fc7a5e237..f1a50150b0282 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java @@ -329,8 +329,11 @@ public void testWriteBlobWithRetries() throws Exception { public void testWriteLargeBlob() throws Exception { final int maxRetries = randomIntBetween(2, 5); - final byte[] data = randomBytes((int) ByteSizeUnit.MB.toBytes(10)); - int nbBlocks = (int) Math.ceil((double) data.length / (double) ByteSizeUnit.MB.toBytes(1)); + final byte[] data = randomBytes(ByteSizeUnit.MB.toIntBytes(10) + randomIntBetween(0, ByteSizeUnit.MB.toIntBytes(1))); + int nbBlocks = data.length / ByteSizeUnit.MB.toIntBytes(1); + if (data.length % ByteSizeUnit.MB.toIntBytes(1) != 0) { + nbBlocks += 1; + } final int nbErrors = 2; // we want all requests to fail at least once final AtomicInteger countDownUploads = new AtomicInteger(nbErrors * nbBlocks); @@ -378,6 +381,9 @@ public void testWriteLargeBlob() throws Exception { if (randomBoolean()) { Streams.readFully(exchange.getRequestBody()); AzureHttpHandler.sendError(exchange, randomFrom(RestStatus.INTERNAL_SERVER_ERROR, RestStatus.SERVICE_UNAVAILABLE)); + } else { + long contentLength = Long.parseLong(exchange.getRequestHeaders().getFirst("Content-Length")); + readFromInputStream(exchange.getRequestBody(), randomLongBetween(0, contentLength)); } exchange.close(); }); @@ -621,4 +627,16 @@ private String getEndpointForServer(HttpServer server, String accountName) { InetSocketAddress address = server.getAddress(); return "http://" + InetAddresses.toUriString(address.getAddress()) + ":" + 
address.getPort() + "/" + accountName; } + + private void readFromInputStream(InputStream inputStream, long bytesToRead) { + try { + long totalBytesRead = 0; + while (inputStream.read() != -1 && totalBytesRead < bytesToRead) { + totalBytesRead += 1; + } + assertThat(totalBytesRead, equalTo(bytesToRead)); + } catch (IOException e) { + throw new RuntimeException(e); + } + } } diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index 45689f0fed691..f6d5a4a845430 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -64,6 +64,7 @@ import static org.elasticsearch.packaging.util.docker.Docker.waitForElasticsearch; import static org.elasticsearch.packaging.util.docker.DockerFileMatcher.file; import static org.elasticsearch.packaging.util.docker.DockerRun.builder; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; @@ -80,8 +81,13 @@ import static org.junit.Assume.assumeTrue; /** - * This class tests the Elasticsearch Docker images. We have more than one because we build - * an image with a custom, small base image, and an image based on RedHat's UBI. + * This class tests the Elasticsearch Docker images. We have several: + *
    + *
  • The default image with a custom, small base image
  • + *
  • A UBI-based image
  • + *
  • Another UBI image for Iron Bank
  • + *
  • Images for Cloud
  • + *
*/ public class DockerTests extends PackagingTestCase { private Path tempDir; @@ -95,10 +101,7 @@ public static void filterDistros() { @Before public void setupTest() throws IOException { - installation = runContainer( - distribution(), - builder().envVars(Map.of("ingest.geoip.downloader.enabled", "false", "ELASTIC_PASSWORD", PASSWORD)) - ); + installation = runContainer(distribution(), builder().envVar("ELASTIC_PASSWORD", PASSWORD)); tempDir = createTempDir(DockerTests.class.getSimpleName()); } @@ -129,7 +132,7 @@ public void test011SecurityEnabledStatus() throws Exception { */ public void test012SecurityCanBeDisabled() throws Exception { // restart container with security disabled - runContainer(distribution(), builder().envVars(Map.of("xpack.security.enabled", "false"))); + runContainer(distribution(), builder().envVar("xpack.security.enabled", "false")); waitForElasticsearch(installation); final int unauthStatusCode = ServerUtils.makeRequestAndGetStatus(Request.Get("http://localhost:9200"), null, null, null); assertThat(unauthStatusCode, equalTo(200)); @@ -155,7 +158,7 @@ public void test020PluginsListWithNoPlugins() { */ public void test021PluginsListWithPlugins() { assumeTrue( - "Only applies to non-Cloud images", + "Only applies to Cloud images", distribution.packaging == Packaging.DOCKER_CLOUD || distribution().packaging == Packaging.DOCKER_CLOUD_ESS ); @@ -214,7 +217,7 @@ public void test041AmazonCaCertsAreInTheKeystore() { /** * Check that when the keystore is created on startup, it is created with the correct permissions. 
*/ - public void test042KeystorePermissionsAreCorrect() throws Exception { + public void test042KeystorePermissionsAreCorrect() { waitForElasticsearch(installation, USERNAME, PASSWORD); assertThat(installation.config("elasticsearch.keystore"), file(p660)); @@ -252,20 +255,11 @@ public void test070BindMountCustomPathConfAndJvmOptions() throws Exception { Files.setPosixFilePermissions(tempDir.resolve("log4j2.properties"), p644); // Restart the container - final Map volumes = Map.of(tempDir, Path.of("/usr/share/elasticsearch/config")); runContainer( distribution(), - builder().volumes(volumes) - .envVars( - Map.of( - "ES_JAVA_OPTS", - "-XX:-UseCompressedOops", - "ingest.geoip.downloader.enabled", - "false", - "ELASTIC_PASSWORD", - PASSWORD - ) - ) + builder().volume(tempDir, "/usr/share/elasticsearch/config") + .envVar("ES_JAVA_OPTS", "-XX:-UseCompressedOops") + .envVar("ELASTIC_PASSWORD", PASSWORD) ); waitForElasticsearch(installation, USERNAME, PASSWORD); @@ -292,11 +286,9 @@ public void test071BindMountCustomPathWithDifferentUID() throws Exception { mkDirWithPrivilegeEscalation(tempEsDataDir, 1500, 0); // Restart the container - final Map volumes = Map.of(tempEsDataDir.toAbsolutePath(), installation.data); - runContainer( distribution(), - builder().volumes(volumes).envVars(Map.of("ingest.geoip.downloader.enabled", "false", "ELASTIC_PASSWORD", PASSWORD)) + builder().volume(tempEsDataDir.toAbsolutePath(), installation.data).envVar("ELASTIC_PASSWORD", PASSWORD) ); waitForElasticsearch(installation, USERNAME, PASSWORD); @@ -341,18 +333,14 @@ public void test072RunEsAsDifferentUserAndGroup() throws Exception { chownWithPrivilegeEscalation(tempEsDataDir, "501:501"); chownWithPrivilegeEscalation(tempEsLogsDir, "501:501"); - // Define the bind mounts - final Map volumes = new HashMap<>(); - volumes.put(tempEsDataDir.toAbsolutePath(), installation.data); - volumes.put(tempEsConfigDir.toAbsolutePath(), installation.config); - volumes.put(tempEsLogsDir.toAbsolutePath(), 
installation.logs); - // Restart the container runContainer( distribution(), - builder().volumes(volumes) - .envVars(Map.of("ingest.geoip.downloader.enabled", "false", "ELASTIC_PASSWORD", PASSWORD)) + builder().envVar("ELASTIC_PASSWORD", PASSWORD) .uid(501, 501) + .volume(tempEsDataDir.toAbsolutePath(), installation.data) + .volume(tempEsConfigDir.toAbsolutePath(), installation.config) + .volume(tempEsLogsDir.toAbsolutePath(), installation.logs) ); waitForElasticsearch(installation, USERNAME, PASSWORD); @@ -362,14 +350,9 @@ public void test072RunEsAsDifferentUserAndGroup() throws Exception { * Check that it is possible to run Elasticsearch under a different user and group to the default, * without bind-mounting any directories, provided the container user is added to the `root` group. */ - public void test073RunEsAsDifferentUserAndGroupWithoutBindMounting() throws Exception { + public void test073RunEsAsDifferentUserAndGroupWithoutBindMounting() { // Restart the container - runContainer( - distribution(), - builder().envVars(Map.of("ingest.geoip.downloader.enabled", "false", "ELASTIC_PASSWORD", PASSWORD)) - .uid(501, 501) - .extraArgs("--group-add 0") - ); + runContainer(distribution(), builder().extraArgs("--group-add 0").uid(501, 501).envVar("ELASTIC_PASSWORD", PASSWORD)); waitForElasticsearch(installation, USERNAME, PASSWORD); } @@ -384,18 +367,17 @@ public void test080ConfigurePasswordThroughEnvironmentVariableFile() throws Exce // ELASTIC_PASSWORD_FILE Files.writeString(tempDir.resolve(passwordFilename), xpackPassword + "\n"); - Map envVars = Map.of("ELASTIC_PASSWORD_FILE", "/run/secrets/" + passwordFilename); - // File permissions need to be secured in order for the ES wrapper to accept // them for populating env var values Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p600); // But when running in Vagrant, also ensure ES can actually access the file chownWithPrivilegeEscalation(tempDir.resolve(passwordFilename), "1000:0"); - final Map 
volumes = Map.of(tempDir, Path.of("/run/secrets")); - // Restart the container - runContainer(distribution(), builder().volumes(volumes).envVars(envVars)); + runContainer( + distribution(), + builder().volume(tempDir, "/run/secrets").envVar("ELASTIC_PASSWORD_FILE", "/run/secrets/" + passwordFilename) + ); // If we configured security correctly, then this call will only work if we specify the correct credentials. try { @@ -432,18 +414,17 @@ public void test081SymlinksAreFollowedWithEnvironmentVariableFiles() throws Exce // it won't resolve inside the container. Files.createSymbolicLink(tempDir.resolve(symlinkFilename), Path.of(passwordFilename)); - Map envVars = Map.of("ELASTIC_PASSWORD_FILE", "/run/secrets/" + symlinkFilename); - // File permissions need to be secured in order for the ES wrapper to accept // them for populating env var values. The wrapper will resolve the symlink // and check the target's permissions. Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p600); - final Map volumes = Map.of(tempDir, Path.of("/run/secrets")); - // Restart the container - this will check that Elasticsearch started correctly, // and didn't fail to follow the symlink and check the file permissions - runContainer(distribution(), builder().volumes(volumes).envVars(envVars)); + runContainer( + distribution(), + builder().volume(tempDir, "/run/secrets").envVar("ELASTIC_PASSWORD_FILE", "/run/secrets/" + symlinkFilename) + ); } /** @@ -454,17 +435,16 @@ public void test082CannotUseEnvVarsAndFiles() throws Exception { Files.writeString(tempDir.resolve(passwordFilename), "other_hunter2\n"); - Map envVars = new HashMap<>(); - envVars.put("ELASTIC_PASSWORD", "hunter2"); - envVars.put("ELASTIC_PASSWORD_FILE", "/run/secrets/" + passwordFilename); - // File permissions need to be secured in order for the ES wrapper to accept // them for populating env var values Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p600); - final Map volumes = Map.of(tempDir, 
Path.of("/run/secrets")); - - final Result dockerLogs = runContainerExpectingFailure(distribution, builder().volumes(volumes).envVars(envVars)); + final Result dockerLogs = runContainerExpectingFailure( + distribution, + builder().volume(tempDir, "/run/secrets") + .envVar("ELASTIC_PASSWORD", "hunter2") + .envVar("ELASTIC_PASSWORD_FILE", "/run/secrets/" + passwordFilename) + ); assertThat( dockerLogs.stderr, @@ -481,15 +461,14 @@ public void test083EnvironmentVariablesUsingFilesHaveCorrectPermissions() throws Files.writeString(tempDir.resolve(passwordFilename), "hunter2\n"); - Map envVars = Map.of("ELASTIC_PASSWORD_FILE", "/run/secrets/" + passwordFilename); - // Set invalid file permissions Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p660); - final Map volumes = Map.of(tempDir, Path.of("/run/secrets")); - // Restart the container - final Result dockerLogs = runContainerExpectingFailure(distribution(), builder().volumes(volumes).envVars(envVars)); + final Result dockerLogs = runContainerExpectingFailure( + distribution(), + builder().volume(tempDir, "/run/secrets").envVar("ELASTIC_PASSWORD_FILE", "/run/secrets/" + passwordFilename) + ); assertThat( dockerLogs.stderr, @@ -518,15 +497,14 @@ public void test084SymlinkToFileWithInvalidPermissionsIsRejected() throws Except // it won't resolve inside the container. 
Files.createSymbolicLink(tempDir.resolve(symlinkFilename), Path.of(passwordFilename)); - Map envVars = Map.of("ELASTIC_PASSWORD_FILE", "/run/secrets/" + symlinkFilename); - // Set invalid permissions on the file that the symlink targets Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p775); - final Map volumes = Map.of(tempDir, Path.of("/run/secrets")); - // Restart the container - final Result dockerLogs = runContainerExpectingFailure(distribution(), builder().volumes(volumes).envVars(envVars)); + final Result dockerLogs = runContainerExpectingFailure( + distribution(), + builder().volume(tempDir, "/run/secrets").envVar("ELASTIC_PASSWORD_FILE", "/run/secrets/" + symlinkFilename) + ); assertThat( dockerLogs.stderr, @@ -545,7 +523,7 @@ public void test084SymlinkToFileWithInvalidPermissionsIsRejected() throws Except * `docker exec`, where the Docker image's entrypoint is not executed. */ public void test085EnvironmentVariablesAreRespectedUnderDockerExec() throws Exception { - installation = runContainer(distribution(), builder().envVars(Map.of("ELASTIC_PASSWORD", "hunter2"))); + installation = runContainer(distribution(), builder().envVar("ELASTIC_PASSWORD", "hunter2")); // The tool below requires a keystore, so ensure that ES is fully initialised before proceeding. 
waitForElasticsearch("green", null, installation, "elastic", "hunter2"); @@ -570,7 +548,7 @@ public void test085EnvironmentVariablesAreRespectedUnderDockerExec() throws Exce */ public void test086EnvironmentVariablesInSnakeCaseAreTranslated() { // Note the double-underscore in the var name here, which retains the underscore in translation - installation = runContainer(distribution(), builder().envVars(Map.of("ES_SETTING_XPACK_SECURITY_FIPS__MODE_ENABLED", "false"))); + installation = runContainer(distribution(), builder().envVar("ES_SETTING_XPACK_SECURITY_FIPS__MODE_ENABLED", "false")); final Optional commandLine = sh.run("bash -c 'COLUMNS=2000 ps ax'").stdout.lines() .filter(line -> line.contains("org.elasticsearch.bootstrap.Elasticsearch")) @@ -585,16 +563,18 @@ public void test086EnvironmentVariablesInSnakeCaseAreTranslated() { * Check that environment variables that do not match the criteria for translation to settings are ignored. */ public void test087EnvironmentVariablesInIncorrectFormatAreIgnored() { - final Map envVars = new HashMap<>(); - // No ES_SETTING_ prefix - envVars.put("XPACK_SECURITY_FIPS__MODE_ENABLED", "false"); - // Incomplete prefix - envVars.put("ES_XPACK_SECURITY_FIPS__MODE_ENABLED", "false"); - // Not underscore-separated - envVars.put("ES.XPACK.SECURITY.FIPS_MODE.ENABLED", "false"); - // Not uppercase - envVars.put("es_xpack_security_fips__mode_enabled", "false"); - installation = runContainer(distribution(), builder().envVars(envVars)); + installation = runContainer( + distribution(), + builder() + // No ES_SETTING_ prefix + .envVar("XPACK_SECURITY_FIPS__MODE_ENABLED", "false") + // Incomplete prefix + .envVar("ES_XPACK_SECURITY_FIPS__MODE_ENABLED", "false") + // Not underscore-separated + .envVar("ES.SETTING.XPACK.SECURITY.FIPS_MODE.ENABLED", "false") + // Not uppercase + .envVar("es_setting_xpack_security_fips__mode_enabled", "false") + ); final Optional commandLine = sh.run("bash -c 'COLUMNS=2000 ps ax'").stdout.lines() .filter(line 
-> line.contains("org.elasticsearch.bootstrap.Elasticsearch")) @@ -605,6 +585,32 @@ public void test087EnvironmentVariablesInIncorrectFormatAreIgnored() { assertThat(commandLine.get(), not(containsString("-Expack.security.fips_mode.enabled=false"))); } + /** + * Check that settings are applied when they are supplied as environment variables with names that: + *
    + *
  • Consist only of lowercase letters, numbers, underscores and hyphens
  • + *
  • Separated by periods
  • + *
+ */ + public void test088EnvironmentVariablesInDottedFormatArePassedThrough() { + // Note the double-underscore in the var name here, which retains the underscore in translation + installation = runContainer( + distribution(), + builder().envVar("xpack.security.fips_mode.enabled", "false").envVar("http.cors.allow-methods", "GET") + ); + + final Optional commandLine = sh.run("bash -c 'COLUMNS=2000 ps ax'").stdout.lines() + .filter(line -> line.contains("org.elasticsearch.bootstrap.Elasticsearch")) + .findFirst(); + + assertThat(commandLine.isPresent(), equalTo(true)); + + assertThat( + commandLine.get(), + allOf(containsString("-Expack.security.fips_mode.enabled=false"), containsString("-Ehttp.cors.allow-methods=GET")) + ); + } + /** * Check whether the elasticsearch-certutil tool has been shipped correctly, * and if present then it can execute. @@ -746,7 +752,7 @@ public void test110OrgOpencontainersLabels() throws Exception { /** * Check that the container logs contain the expected content for Elasticsearch itself. 
*/ - public void test120DockerLogsIncludeElasticsearchLogs() throws Exception { + public void test120DockerLogsIncludeElasticsearchLogs() { waitForElasticsearch(installation, USERNAME, PASSWORD); final Result containerLogs = getContainerLogs(); @@ -757,11 +763,8 @@ public void test120DockerLogsIncludeElasticsearchLogs() throws Exception { /** * Check that it is possible to write logs to disk */ - public void test121CanUseStackLoggingConfig() throws Exception { - runContainer( - distribution(), - builder().envVars(Map.of("ES_LOG_STYLE", "file", "ingest.geoip.downloader.enabled", "false", "ELASTIC_PASSWORD", PASSWORD)) - ); + public void test121CanUseStackLoggingConfig() { + runContainer(distribution(), builder().envVar("ES_LOG_STYLE", "file").envVar("ELASTIC_PASSWORD", PASSWORD)); waitForElasticsearch(installation, USERNAME, PASSWORD); @@ -779,11 +782,8 @@ public void test121CanUseStackLoggingConfig() throws Exception { /** * Check that the default logging config can be explicitly selected. */ - public void test122CanUseDockerLoggingConfig() throws Exception { - runContainer( - distribution(), - builder().envVars(Map.of("ES_LOG_STYLE", "console", "ingest.geoip.downloader.enabled", "false", "ELASTIC_PASSWORD", PASSWORD)) - ); + public void test122CanUseDockerLoggingConfig() { + runContainer(distribution(), builder().envVar("ES_LOG_STYLE", "console").envVar("ELASTIC_PASSWORD", PASSWORD)); waitForElasticsearch(installation, USERNAME, PASSWORD); @@ -798,7 +798,7 @@ public void test122CanUseDockerLoggingConfig() throws Exception { * Check that an unknown logging config is rejected */ public void test123CannotUseUnknownLoggingConfig() { - final Result result = runContainerExpectingFailure(distribution(), builder().envVars(Map.of("ES_LOG_STYLE", "unknown"))); + final Result result = runContainerExpectingFailure(distribution(), builder().envVar("ES_LOG_STYLE", "unknown")); assertThat(result.stderr, containsString("ERROR: ES_LOG_STYLE set to [unknown]. 
Expected [console] or [file]")); } @@ -806,8 +806,8 @@ public void test123CannotUseUnknownLoggingConfig() { /** * Check that it when configuring logging to write to disk, the container can be restarted. */ - public void test124CanRestartContainerWithStackLoggingConfig() throws Exception { - runContainer(distribution(), builder().envVars(Map.of("ES_LOG_STYLE", "file", "ELASTIC_PASSWORD", PASSWORD))); + public void test124CanRestartContainerWithStackLoggingConfig() { + runContainer(distribution(), builder().envVar("ES_LOG_STYLE", "file").envVar("ELASTIC_PASSWORD", PASSWORD)); waitForElasticsearch(installation, USERNAME, PASSWORD); @@ -883,9 +883,7 @@ public void test150MachineDependentHeap() throws Exception { // Now run the container, being explicit about the available memory runContainer( distribution(), - builder().memory("942m") - .volumes(Map.of(jvmOptionsPath, containerJvmOptionsPath)) - .envVars(Map.of("ingest.geoip.downloader.enabled", "false", "ELASTIC_PASSWORD", PASSWORD)) + builder().memory("942m").volume(jvmOptionsPath, containerJvmOptionsPath).envVar("ELASTIC_PASSWORD", PASSWORD) ); waitForElasticsearch(installation, USERNAME, PASSWORD); diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java index 8b090420213be..e286720007488 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java @@ -23,7 +23,6 @@ import java.nio.file.Path; import java.util.Arrays; import java.util.List; -import java.util.Map; import static org.elasticsearch.packaging.util.Archives.ARCHIVE_OWNER; import static org.elasticsearch.packaging.util.Archives.installArchive; @@ -67,7 +66,7 @@ public void test10InstallArchiveDistribution() throws Exception { verifyArchiveInstallation(installation, distribution()); final Installation.Executables bin 
= installation.executables(); - Shell.Result r = sh.runIgnoreExitCode(bin.keystoreTool.toString() + " has-passwd"); + Shell.Result r = sh.runIgnoreExitCode(bin.keystoreTool + " has-passwd"); assertFalse("has-passwd should fail", r.isSuccess()); assertThat("has-passwd should indicate missing keystore", r.stderr, containsString(ERROR_KEYSTORE_NOT_FOUND)); } @@ -82,7 +81,7 @@ public void test11InstallPackageDistribution() throws Exception { verifyPackageInstallation(installation, distribution, sh); final Installation.Executables bin = installation.executables(); - Shell.Result r = sh.runIgnoreExitCode(bin.keystoreTool.toString() + " has-passwd"); + Shell.Result r = sh.runIgnoreExitCode(bin.keystoreTool + " has-passwd"); assertFalse("has-passwd should fail", r.isSuccess()); assertThat("has-passwd should indicate unprotected keystore", r.stderr, containsString(ERROR_KEYSTORE_NOT_PASSWORD_PROTECTED)); Shell.Result r2 = bin.keystoreTool.run("list"); @@ -93,7 +92,7 @@ public void test11InstallPackageDistribution() throws Exception { public void test12InstallDockerDistribution() throws Exception { assumeTrue(distribution().isDocker()); - installation = Docker.runContainer(distribution(), builder().envVars(Map.of("ingest.geoip.downloader.enabled", "false"))); + installation = Docker.runContainer(distribution(), builder()); try { waitForPathToExist(installation.config("elasticsearch.keystore")); @@ -102,7 +101,7 @@ public void test12InstallDockerDistribution() throws Exception { } final Installation.Executables bin = installation.executables(); - Shell.Result r = sh.runIgnoreExitCode(bin.keystoreTool.toString() + " has-passwd"); + Shell.Result r = sh.runIgnoreExitCode(bin.keystoreTool + " has-passwd"); assertFalse("has-passwd should fail", r.isSuccess()); assertThat("has-passwd should indicate unprotected keystore", r.stdout, containsString(ERROR_KEYSTORE_NOT_PASSWORD_PROTECTED)); Shell.Result r2 = bin.keystoreTool.run("list"); @@ -270,16 +269,12 @@ public void 
test60DockerEnvironmentVariablePassword() throws Exception { Path localConfigDir = getMountedLocalConfDirWithKeystore(password, installation.config); // restart ES with password and mounted config dir containing password protected keystore - Map volumes = Map.of(localConfigDir.resolve("config"), installation.config); - Map envVars = Map.of( - "KEYSTORE_PASSWORD", - password, - "ingest.geoip.downloader.enabled", - "false", - "ELASTIC_PASSWORD", - PASSWORD + runContainer( + distribution(), + builder().volume(localConfigDir.resolve("config"), installation.config) + .envVar("KEYSTORE_PASSWORD", password) + .envVar("ELASTIC_PASSWORD", PASSWORD) ); - runContainer(distribution(), builder().volumes(volumes).envVars(envVars)); waitForElasticsearch(installation, USERNAME, PASSWORD); ServerUtils.runElasticsearchTests(USERNAME, PASSWORD); } @@ -304,18 +299,14 @@ public void test61DockerEnvironmentVariablePasswordFromFile() throws Exception { Path localConfigDir = getMountedLocalConfDirWithKeystore(password, installation.config); // restart ES with password and mounted config dir containing password protected keystore - Map volumes = Map.of(localConfigDir.resolve("config"), installation.config, tempDir, Path.of("/run/secrets")); - Map envVars = Map.of( - "KEYSTORE_PASSWORD_FILE", - "/run/secrets/" + passwordFilename, - "ingest.geoip.downloader.enabled", - "false", - "ELASTIC_PASSWORD", - PASSWORD + runContainer( + distribution(), + builder().volume(localConfigDir.resolve("config"), installation.config) + .volume(tempDir, "/run/secrets") + .envVar("KEYSTORE_PASSWORD_FILE", "/run/secrets/" + passwordFilename) + .envVar("ELASTIC_PASSWORD", PASSWORD) ); - runContainer(distribution(), builder().volumes(volumes).envVars(envVars)); - waitForElasticsearch(installation, USERNAME, PASSWORD); ServerUtils.runElasticsearchTests(USERNAME, PASSWORD); } finally { @@ -337,9 +328,10 @@ public void test62DockerEnvironmentVariableBadPassword() throws Exception { Path localConfigPath = 
getMountedLocalConfDirWithKeystore(password, installation.config); // restart ES with password and mounted config dir containing password protected keystore - Map volumes = Map.of(localConfigPath.resolve("config"), installation.config); - Map envVars = Map.of("KEYSTORE_PASSWORD", "wrong"); - Shell.Result r = runContainerExpectingFailure(distribution(), builder().volumes(volumes).envVars(envVars)); + Shell.Result r = runContainerExpectingFailure( + distribution(), + builder().volume(localConfigPath.resolve("config"), installation.config).envVar("KEYSTORE_PASSWORD", "wrong") + ); assertThat(r.stderr, containsString(ERROR_INCORRECT_PASSWORD)); } @@ -354,7 +346,6 @@ private Path getMountedLocalConfDirWithKeystore(String password, Path dockerKeys // Mount a temporary directory for copying the keystore Path dockerTemp = Path.of("/usr/tmp/keystore-tmp"); Path tempDirectory = createTempDir(KeystoreManagementTests.class.getSimpleName()); - Map volumes = Map.of(tempDirectory, dockerTemp); // It's very tricky to properly quote a pipeline that you're passing to // a docker exec command, so we're just going to put a small script in the @@ -367,7 +358,7 @@ private Path getMountedLocalConfDirWithKeystore(String password, Path dockerKeys Files.write(tempDirectory.resolve("set-pass.sh"), setPasswordScript); - runContainer(distribution(), builder().volumes(volumes)); + runContainer(distribution(), builder().volume(tempDirectory, dockerTemp)); try { waitForPathToExist(dockerTemp); waitForPathToExist(dockerKeystore); diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java b/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java index ba014f4c5c474..ff4055997fbdf 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java @@ -463,6 +463,13 @@ public static void waitForElasticsearch(String status, String index, Installatio withLogging(() -> 
ServerUtils.waitForElasticsearch(status, index, installation, username, password, null)); } + /** + * Waits for the Elasticsearch cluster status to turn green. + * + * @param installation the installation to check + * @param username the username to authenticate with + * @param password the password to authenticate with + */ public static void waitForElasticsearch(Installation installation, String username, String password) { try { waitForElasticsearch("green", null, installation, username, password); @@ -517,6 +524,16 @@ public static JsonNode getJson(String path) throws Exception { return mapper.readTree(pluginsResponse); } + /** + * Fetches the resource from the specified {@code path} on {@code http://localhost:9200}, using + * the supplied authentication credentials. + * + * @param path the path to fetch + * @param user the user to authenticate with + * @param password the password to authenticate with + * @return a parsed JSON response + * @throws Exception if something goes wrong + */ public static JsonNode getJson(String path, String user, String password) throws Exception { path = Objects.requireNonNull(path, "path can not be null").trim(); if (path.isEmpty()) { @@ -585,7 +602,7 @@ public static void restartContainer() { sh.run("docker restart " + containerId); } - public static PosixFileAttributes getAttributes(Path path) throws FileNotFoundException { + static PosixFileAttributes getAttributes(Path path) throws FileNotFoundException { final Shell.Result result = dockerShell.runIgnoreExitCode("stat -c \"%U %G %A\" " + path); if (result.isSuccess() == false) { throw new FileNotFoundException(path + " does not exist"); diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java b/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java index 87c18ee991e63..b92af7e3725cd 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java +++ 
b/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java @@ -17,11 +17,14 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Objects; +import static java.util.Objects.requireNonNull; import static org.elasticsearch.packaging.util.FileExistenceMatchers.fileExists; import static org.hamcrest.MatcherAssert.assertThat; +/** + * A utility class for constructing a {@code docker run} command line from Java. + */ public class DockerRun { private Distribution distribution; @@ -35,28 +38,37 @@ public class DockerRun { private DockerRun() {} public static DockerRun builder() { - return new DockerRun(); + // Disable this setting by default in the Docker tests + return new DockerRun().envVar("ingest.geoip.downloader.enabled", "false"); } public DockerRun distribution(Distribution distribution) { - this.distribution = Objects.requireNonNull(distribution); + this.distribution = requireNonNull(distribution); return this; } - public DockerRun envVars(Map envVars) { - if (envVars != null) { - this.envVars.putAll(envVars); - } + public DockerRun envVar(String key, String value) { + this.envVars.put(requireNonNull(key), requireNonNull(value)); return this; } - public DockerRun volumes(Map volumes) { - if (volumes != null) { - this.volumes.putAll(volumes); - } + public DockerRun volume(Path from, String to) { + this.volumes.put(requireNonNull(from), Path.of(requireNonNull(to))); return this; } + public DockerRun volume(Path from, Path to) { + this.volumes.put(requireNonNull(from), requireNonNull(to)); + return this; + } + + /** + * Sets the UID that the container is run with, and the GID too if specified. 
+ * + * @param uid the UID to use, or {@code null} to use the image default + * @param gid the GID to use, or {@code null} to use the image default + * @return the current builder + */ public DockerRun uid(Integer uid, Integer gid) { if (uid == null) { if (gid != null) { @@ -69,9 +81,7 @@ public DockerRun uid(Integer uid, Integer gid) { } public DockerRun memory(String memoryLimit) { - if (memoryLimit != null) { - this.memory = memoryLimit; - } + this.memory = requireNonNull(memoryLimit); return this; } diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 48c87321e9aaa..c98869d84f471 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -81,6 +81,9 @@ tasks.named("yamlRestTestV7CompatTransform").configure { task -> task.skipTest("search.aggregation/20_terms/string profiler via global ordinals native implementation", "The profiler results aren't backwards compatible.") task.skipTest("search.aggregation/20_terms/string profiler via map", "The profiler results aren't backwards compatible.") task.skipTest("search.aggregation/20_terms/numeric profiler", "The profiler results aren't backwards compatible.") + task.skipTest("nodes.stats/11_indices_metrics/Metric - _all for indices shards", "Muted because we are intentionally making a breaking bugfix. Unmute when #78531 is backported") + task.skipTest("nodes.stats/11_indices_metrics/indices shards total count test", "Muted because we are intentionally making a breaking bugfix. Unmute when #78531 is backported") + task.skipTest("nodes.stats/11_indices_metrics/Metric - blank for indices shards", "Muted because we are intentionally making a breaking bugfix. 
Unmute when #78531 is backported") task.replaceValueInMatch("_type", "_doc") task.addAllowedWarningRegex("\\[types removal\\].*") diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.stats.json index 7a13a6c1033c5..2e19fa1c35dfe 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.stats.json @@ -128,7 +128,7 @@ "store", "warmer", "bulk", - "shards" + "shard_stats" ], "description":"Limit the information returned for `indices` metric to the specific index metrics. Isn't used if `indices` (or `all`) metric isn't specified." } @@ -177,7 +177,7 @@ "store", "warmer", "bulk", - "shards" + "shard_stats" ], "description":"Limit the information returned for `indices` metric to the specific index metrics. Isn't used if `indices` (or `all`) metric isn't specified." }, diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml index 1ef7e81bf16df..ba2b5fdb40a66 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml @@ -109,7 +109,7 @@ - is_false: nodes.$node_id.indices.segments - is_false: nodes.$node_id.indices.translog - is_false: nodes.$node_id.indices.recovery - - is_false: nodes.$node_id.indices.shards + - is_false: nodes.$node_id.indices.shard_stats --- "Metric - multi": @@ -167,7 +167,7 @@ - is_false: nodes.$node_id.indices.segments - is_false: nodes.$node_id.indices.translog - is_true: nodes.$node_id.indices.recovery - - is_false: nodes.$node_id.indices.shards + - is_false: nodes.$node_id.indices.shard_stats --- "Metric - _all include_segment_file_sizes": @@ -225,7 +225,7 @@ - is_true: 
nodes.$node_id.indices.segments - is_false: nodes.$node_id.indices.translog - is_false: nodes.$node_id.indices.recovery - - is_false: nodes.$node_id.indices.shards + - is_false: nodes.$node_id.indices.shard_stats - is_true: nodes.$node_id.indices.segments.file_sizes --- @@ -257,7 +257,7 @@ - is_true: nodes.$node_id.indices.segments - is_false: nodes.$node_id.indices.translog - is_false: nodes.$node_id.indices.recovery - - is_false: nodes.$node_id.indices.shards + - is_false: nodes.$node_id.indices.shard_stats --- "Metric - _all include_unloaded_segments": @@ -321,10 +321,10 @@ # null and cannot be tested here --- -"Metric - blank for indices shards": +"Metric - blank for indices shard_stats": - skip: features: [arbitrary_key] - version: " - 7.14.99" + version: " - 7.16.1" reason: "total shard count added in version 7.15.0" - do: nodes.info: {} @@ -334,14 +334,14 @@ - do: nodes.stats: {} - - is_true: nodes.$node_id.indices.shards - - match: { nodes.$node_id.indices.shards.total_count: 0 } + - is_true: nodes.$node_id.indices.shard_stats + - match: { nodes.$node_id.indices.shard_stats.total_count: 0 } --- -"Metric - _all for indices shards": +"Metric - _all for indices shard_stats": - skip: features: [arbitrary_key] - version: " - 7.14.99" + version: " - 7.16.1" reason: "total shard count added in version 7.15.0" - do: nodes.info: {} @@ -351,16 +351,16 @@ - do: nodes.stats: { metric: _all } - - is_true: nodes.$node_id.indices.shards - - match: { nodes.$node_id.indices.shards.total_count: 0 } + - is_true: nodes.$node_id.indices.shard_stats + - match: { nodes.$node_id.indices.shard_stats.total_count: 0 } --- -"indices shards total count test": +"indices shard_stats total count test": - skip: features: ["allowed_warnings", arbitrary_key] - version: " - 7.14.99" + version: " - 7.16.1" reason: "total shard count added in version 7.15.0" - do: @@ -387,4 +387,4 @@ - do: nodes.stats: { metric: _all } - - gte: { nodes.$node_id.indices.shards.total_count: 1 } + - gte: { 
nodes.$node_id.indices.shard_stats.total_count: 1 } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchLeakIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchLeakIT.java index 2774f8f6d4459..cdee5449885b7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchLeakIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchLeakIT.java @@ -66,7 +66,6 @@ private int indexDocs(Client client, String field, String index) { *
  • scroll vs no scroll
  • * */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/78673") public void testSearch() throws Exception { assertAcked(client(LOCAL_CLUSTER).admin().indices().prepareCreate("demo") .setMapping("f", "type=keyword") @@ -105,7 +104,7 @@ public void testSearch() throws Exception { searchRequest.allowPartialSearchResults(false); boolean scroll = randomBoolean(); searchRequest.source(new SearchSourceBuilder().query(new MatchAllQueryBuilder()) - .aggregation(terms("f").field("f").size(docs + between(scroll ? 1 : 0, 10))).size(between(0, 1000))); + .aggregation(terms("f").field("f").size(docs + between(0, 10))).size(between(scroll ? 1 : 0, 1000))); if (scroll) { searchRequest.scroll("30s"); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java index 5ee11e0e9a657..34af1f1a26216 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -35,13 +35,11 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; import java.time.Instant; import java.time.ZoneOffset; import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; @@ -50,6 +48,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; @@ -921,10 +920,10 @@ public void testDocValueFieldsWithFieldAlias() throws Exception { assertAcked(prepareCreate("test").setMapping(mapping)); 
ensureGreen("test"); - DateTime date = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC); - org.joda.time.format.DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd"); + ZonedDateTime date = ZonedDateTime.of(1990, 12, 29, 0, 0, 0, 0, ZoneOffset.UTC); + DateTimeFormatter formatter = DateTimeFormatter.ofPattern("uuuu-MM-dd", Locale.ROOT); - indexDoc("test", "1", "text_field", "foo", "date_field", formatter.print(date)); + indexDoc("test", "1", "text_field", "foo", "date_field", formatter.format(date)); refresh("test"); SearchRequestBuilder builder = client().prepareSearch().setQuery(matchAllQuery()) @@ -984,10 +983,10 @@ public void testWildcardDocValueFieldsWithFieldAlias() throws Exception { assertAcked(prepareCreate("test").setMapping(mapping)); ensureGreen("test"); - DateTime date = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC); - org.joda.time.format.DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd"); + ZonedDateTime date = ZonedDateTime.of(1990, 12, 29, 0, 0, 0, 0, ZoneOffset.UTC); + DateTimeFormatter formatter = DateTimeFormatter.ofPattern("uuuu-MM-dd", Locale.ROOT); - indexDoc("test", "1", "text_field", "foo", "date_field", formatter.print(date)); + indexDoc("test", "1", "text_field", "foo", "date_field", formatter.format(date)); refresh("test"); SearchRequestBuilder builder = client().prepareSearch().setQuery(matchAllQuery()) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java index 0937832f0bea5..1be4f21f82935 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java @@ -25,9 +25,9 @@ import org.elasticsearch.search.profile.ProfileResult; import 
org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.test.ESIntegTestCase; -import org.joda.time.Instant; import java.io.IOException; +import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.Map; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java index 5143e6c9dab42..9a111642f01d4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java @@ -405,9 +405,9 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(INDICES_FIELD.getPreferredName(), indices); - builder.field(ALIASES_FIELD.getPreferredName(), aliases); - builder.field(DATA_STREAMS_FIELD.getPreferredName(), dataStreams); + builder.xContentList(INDICES_FIELD.getPreferredName(), indices); + builder.xContentList(ALIASES_FIELD.getPreferredName(), aliases); + builder.xContentList(DATA_STREAMS_FIELD.getPreferredName(), dataStreams); builder.endObject(); return builder; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java index fc1238a3d3486..31d507aac5e27 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java @@ -214,7 +214,7 @@ public enum Flag { RequestCache("request_cache", 15), Recovery("recovery", 16), Bulk("bulk", 17), - Shards("shards", 18); + Shards("shard_stats", 18); private final String restName; private final int index; diff 
--git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java index 1e2484a88bbc6..6f8a561199184 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java @@ -87,7 +87,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws for (Map.Entry> entry : overlappingTemplates.entrySet()) { builder.startObject(); builder.field(NAME.getPreferredName(), entry.getKey()); - builder.field(INDEX_PATTERNS.getPreferredName(), entry.getValue()); + builder.stringListField(INDEX_PATTERNS.getPreferredName(), entry.getValue()); builder.endObject(); } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java index 048a0e4898154..14e9e70428651 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java @@ -125,13 +125,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(SEARCHABLE_FIELD.getPreferredName(), isSearchable); builder.field(AGGREGATABLE_FIELD.getPreferredName(), isAggregatable); if (indices != null) { - builder.field(INDICES_FIELD.getPreferredName(), indices); + builder.array(INDICES_FIELD.getPreferredName(), indices); } if (nonSearchableIndices != null) { - builder.field(NON_SEARCHABLE_INDICES_FIELD.getPreferredName(), nonSearchableIndices); + builder.array(NON_SEARCHABLE_INDICES_FIELD.getPreferredName(), nonSearchableIndices); } if (nonAggregatableIndices != null) { - 
builder.field(NON_AGGREGATABLE_INDICES_FIELD.getPreferredName(), nonAggregatableIndices); + builder.array(NON_AGGREGATABLE_INDICES_FIELD.getPreferredName(), nonAggregatableIndices); } if (meta.isEmpty() == false) { builder.startObject("meta"); @@ -140,7 +140,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws for (Map.Entry> entry : entries) { List values = new ArrayList<>(entry.getValue()); values.sort(String::compareTo); // provide predictable order - builder.field(entry.getKey(), values); + builder.stringListField(entry.getKey(), values); } builder.endObject(); } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFailure.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFailure.java index d91a963775c77..c436a6ef59413 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFailure.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFailure.java @@ -46,7 +46,7 @@ public FieldCapabilitiesFailure(StreamInput in) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - builder.field(INDICES_FIELD.getPreferredName(), indices); + builder.stringListField(INDICES_FIELD.getPreferredName(), indices); builder.startObject(FAILURE_FIELD.getPreferredName()); { ElasticsearchException.generateFailureXContent(builder, params, exception, true); diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java index 4cb626607224c..8442e707b74d6 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java @@ -151,11 +151,11 @@ public XContentBuilder toXContent(XContentBuilder builder, 
Params params) throws throw new IllegalStateException("cannot serialize non-merged response"); } builder.startObject(); - builder.field(INDICES_FIELD.getPreferredName(), indices); + builder.array(INDICES_FIELD.getPreferredName(), indices); builder.field(FIELDS_FIELD.getPreferredName(), responseMap); if (this.failures.size() > 0) { builder.field(FAILED_INDICES_FIELD.getPreferredName(), getFailedIndices().length); - builder.field(FAILURES_FIELD.getPreferredName(), failures); + builder.xContentList(FAILURES_FIELD.getPreferredName(), failures); } builder.endObject(); return builder; diff --git a/server/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java b/server/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java index f0736230ee133..52bbab56dd96c 100644 --- a/server/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java +++ b/server/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java @@ -192,7 +192,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(INDEX.getPreferredName(), index); builder.field(ID.getPreferredName(), id); builder.field(ROUTING.getPreferredName(), routing); - builder.field(STORED_FIELDS.getPreferredName(), storedFields); + builder.array(STORED_FIELDS.getPreferredName(), storedFields); builder.field(VERSION.getPreferredName(), version); builder.field(VERSION_TYPE.getPreferredName(), VersionType.toString(versionType)); builder.field(SOURCE.getPreferredName(), fetchSourceContext); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationMetadata.java b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationMetadata.java index b7cb9371414f8..5065342558282 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationMetadata.java @@ -115,7 +115,7 @@ public XContentBuilder toXContent(XContentBuilder 
builder, Params params) throws .field(TERM_PARSE_FIELD.getPreferredName(), term) .field(LAST_COMMITTED_CONFIGURATION_FIELD.getPreferredName(), lastCommittedConfiguration) .field(LAST_ACCEPTED_CONFIGURATION_FIELD.getPreferredName(), lastAcceptedConfiguration) - .field(VOTING_CONFIG_EXCLUSIONS_FIELD.getPreferredName(), votingConfigExclusions); + .xContentList(VOTING_CONFIG_EXCLUSIONS_FIELD.getPreferredName(), votingConfigExclusions); } public static CoordinationMetadata fromXContent(XContentParser parser) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java index 0f76a98bfd17f..99dc022ab6992 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java @@ -199,12 +199,12 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(INDEX_PATTERNS.getPreferredName(), this.indexPatterns); + builder.stringListField(INDEX_PATTERNS.getPreferredName(), this.indexPatterns); if (this.template != null) { builder.field(TEMPLATE.getPreferredName(), this.template); } if (this.componentTemplates != null) { - builder.field(COMPOSED_OF.getPreferredName(), this.componentTemplates); + builder.stringListField(COMPOSED_OF.getPreferredName(), this.componentTemplates); } if (this.priority != null) { builder.field(PRIORITY.getPreferredName(), priority); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index d6e074f481635..2c826074b43a3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -401,7 +401,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field(NAME_FIELD.getPreferredName(), name); builder.field(TIMESTAMP_FIELD_FIELD.getPreferredName(), timeStampField); - builder.field(INDICES_FIELD.getPreferredName(), indices); + builder.xContentList(INDICES_FIELD.getPreferredName(), indices); builder.field(GENERATION_FIELD.getPreferredName(), generation); if (metadata != null) { builder.field(METADATA_FIELD.getPreferredName(), metadata); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java index 5cc4555cb18ed..1276ccddda69f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java @@ -275,7 +275,7 @@ public static DataStreamAlias fromXContent(XContentParser parser) throws IOExcep @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(name); - builder.field(DATA_STREAMS_FIELD.getPreferredName(), dataStreams); + builder.stringListField(DATA_STREAMS_FIELD.getPreferredName(), dataStreams); if (writeDataStream != null) { builder.field(WRITE_DATA_STREAM_FIELD.getPreferredName(), writeDataStream); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java index 611e7a78e7064..d3df1a692f16e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java @@ -121,11 +121,7 @@ public static DataStreamMetadata fromXContent(XContentParser parser) throws IOEx @Override public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(DATA_STREAM.getPreferredName()); - for (Map.Entry dataStream : dataStreams.entrySet()) { - builder.field(dataStream.getKey(), dataStream.getValue()); - } - builder.endObject(); + builder.xContentValuesMap(DATA_STREAM.getPreferredName(), dataStreams); builder.startObject(DATA_STREAM_ALIASES.getPreferredName()); for (Map.Entry dataStream : dataStreamAliases.entrySet()) { dataStream.getValue().toXContent(builder, params); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 98f8dea776ace..022771ccd9ca1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -1396,8 +1396,7 @@ public static void toXContent(IndexMetadata indexMetadata, XContentBuilder build } for (ObjectObjectCursor cursor : indexMetadata.customData) { - builder.field(cursor.key); - builder.map(cursor.value); + builder.stringStringMap(cursor.key, cursor.value); } if (context != Metadata.XContentContext.API) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java index 60a1473e97fe0..785fa69e5adee 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetadata.java @@ -377,7 +377,7 @@ private static void toInnerXContent(IndexTemplateMetadata indexTemplateMetadata, if (indexTemplateMetadata.version() != null) { builder.field("version", indexTemplateMetadata.version()); } - builder.field("index_patterns", indexTemplateMetadata.patterns()); + builder.stringListField("index_patterns", indexTemplateMetadata.patterns()); 
builder.startObject("settings"); indexTemplateMetadata.settings().toXContent(builder, params); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ItemUsage.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ItemUsage.java index 993ae141e3466..8d3008b6ca162 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ItemUsage.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ItemUsage.java @@ -77,13 +77,13 @@ public Set getComposableTemplates() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); if (this.indices != null) { - builder.field("indices", this.indices); + builder.stringListField("indices", this.indices); } if (this.dataStreams != null) { - builder.field("data_streams", this.dataStreams); + builder.stringListField("data_streams", this.dataStreams); } if (this.composableTemplates != null) { - builder.field("composable_templates", this.composableTemplates); + builder.stringListField("composable_templates", this.composableTemplates); } builder.endObject(); return builder; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Manifest.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Manifest.java index c7640b61ba4fd..d13c56dbb8252 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Manifest.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Manifest.java @@ -135,7 +135,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CURRENT_TERM_PARSE_FIELD.getPreferredName(), currentTerm); builder.field(CLUSTER_STATE_VERSION_PARSE_FIELD.getPreferredName(), clusterStateVersion); builder.field(GENERATION_PARSE_FIELD.getPreferredName(), globalGeneration); - builder.array(INDEX_GENERATIONS_PARSE_FIELD.getPreferredName(), indexEntryList().toArray()); + builder.xContentList(INDEX_GENERATIONS_PARSE_FIELD.getPreferredName(), indexEntryList()); return 
builder; } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java b/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java index bc36fb5abfc3f..aaa22320d1a7f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java @@ -515,7 +515,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("failed_attempts", failedAllocations); } if (failedNodeIds.isEmpty() == false) { - builder.field("failed_nodes", failedNodeIds); + builder.stringListField("failed_nodes", failedNodeIds); } builder.field("delayed", delayed); String details = getDetails(); diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index e8a2144e9e4a5..01d75211e3ed1 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -572,7 +572,7 @@ public final boolean match(String toTest) { public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("key", getKey()); - builder.field("properties", properties); + builder.enumSet("properties", properties); builder.field("is_group_setting", isGroupSetting()); builder.field("default", defaultValue.apply(Settings.EMPTY)); builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 4f572db20a47b..5d8db2c384d12 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -1022,12 +1022,19 @@ public Set availableIndexFoldersForPath(final NodePath nodePath, Predica /** * Resolves all existing paths to 
indexFolderName in ${data.paths}/indices */ - public Path resolveIndexFolder(String indexFolderName) { + public Path[] resolveIndexFolder(String indexFolderName) { if (nodePaths == null || locks == null) { throw new IllegalStateException("node is not configured to store local location"); } assertEnvIsLocked(); - return nodePaths[0].indicesPath.resolve(indexFolderName); + List paths = new ArrayList<>(nodePaths.length); + for (NodePath nodePath : nodePaths) { + Path indexFolder = nodePath.indicesPath.resolve(indexFolderName); + if (Files.exists(indexFolder)) { + paths.add(indexFolder); + } + } + return paths.toArray(new Path[paths.size()]); } /** @@ -1290,22 +1297,22 @@ public static Path shardStatePathToDataPath(Path shardPath) { private void assertCanWrite() throws IOException { tryWriteTempFile(nodeDataPath()); for (String indexFolderName : this.availableIndexFolders()) { - // check index paths are writable - Path indexPath = this.resolveIndexFolder(indexFolderName); - Path indexStatePath = indexPath.resolve(MetadataStateFormat.STATE_DIR_NAME); - tryWriteTempFile(indexStatePath); - tryWriteTempFile(indexPath); - try (DirectoryStream stream = Files.newDirectoryStream(indexPath)) { - for (Path shardPath : stream) { - String fileName = shardPath.getFileName().toString(); - if (Files.isDirectory(shardPath) && fileName.chars().allMatch(Character::isDigit)) { - Path indexDir = shardPath.resolve(ShardPath.INDEX_FOLDER_NAME); - Path statePath = shardPath.resolve(MetadataStateFormat.STATE_DIR_NAME); - Path translogDir = shardPath.resolve(ShardPath.TRANSLOG_FOLDER_NAME); - tryWriteTempFile(indexDir); - tryWriteTempFile(translogDir); - tryWriteTempFile(statePath); - tryWriteTempFile(shardPath); + for (Path indexPath : this.resolveIndexFolder(indexFolderName)) { // check index paths are writable + Path indexStatePath = indexPath.resolve(MetadataStateFormat.STATE_DIR_NAME); + tryWriteTempFile(indexStatePath); + tryWriteTempFile(indexPath); + try (DirectoryStream stream = 
Files.newDirectoryStream(indexPath)) { + for (Path shardPath : stream) { + String fileName = shardPath.getFileName().toString(); + if (Files.isDirectory(shardPath) && fileName.chars().allMatch(Character::isDigit)) { + Path indexDir = shardPath.resolve(ShardPath.INDEX_FOLDER_NAME); + Path statePath = shardPath.resolve(MetadataStateFormat.STATE_DIR_NAME); + Path translogDir = shardPath.resolve(ShardPath.TRANSLOG_FOLDER_NAME); + tryWriteTempFile(indexDir); + tryWriteTempFile(translogDir); + tryWriteTempFile(statePath); + tryWriteTempFile(shardPath); + } } } } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java index eed32a6126190..97d9707814e9d 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java @@ -74,7 +74,7 @@ public FieldComparator newComparator(String fieldname, int numHits, int sortP final double dMissingValue = (Double) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that // the comparator doesn't check docsWithField since we replace missing values in select() - return new DoubleComparator(numHits, null, null, reversed, sortPos) { + DoubleComparator comparator = new DoubleComparator(numHits, null, null, reversed, sortPos) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new DoubleLeafComparator(context) { @@ -90,6 +90,9 @@ public void setScorer(Scorable scorer) { }; } }; + // TODO: when LUCENE-10154 is available, instead of disableSkipping this comparator should implement `getPointValue` + comparator.disableSkipping(); + return comparator; } @Override diff --git 
a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java index 9906075587c1c..157eb475fa5fe 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java @@ -67,7 +67,7 @@ public FieldComparator newComparator(String fieldname, int numHits, int sortP final float fMissingValue = (Float) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that // the comparator doesn't check docsWithField since we replace missing values in select() - return new FloatComparator(numHits, null, null, reversed, sortPos) { + FloatComparator comparator = new FloatComparator(numHits, null, null, reversed, sortPos) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new FloatLeafComparator(context) { @@ -78,6 +78,9 @@ protected NumericDocValues getNumericDocValues(LeafReaderContext context, String }; } }; + // TODO: when LUCENE-10154 is available, instead of disableSkipping this comparator should implement `getPointValue` + comparator.disableSkipping(); + return comparator; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java index d237819502006..57244b464bfbc 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java @@ -90,7 +90,7 @@ public FieldComparator newComparator(String fieldname, int numHits, int sortP 
final long lMissingValue = (Long) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that // the comparator doesn't check docsWithField since we replace missing values in select() - return new LongComparator(numHits, null, null, reversed, sortPos) { + LongComparator comparator = new LongComparator(numHits, null, null, reversed, sortPos) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new LongLeafComparator(context) { @@ -101,6 +101,9 @@ protected NumericDocValues getNumericDocValues(LeafReaderContext context, String }; } }; + // TODO: when LUCENE-10154 is available, instead of disableSkipping this comparator should implement `getPointValue` + comparator.disableSkipping(); + return comparator; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardCountStats.java b/server/src/main/java/org/elasticsearch/index/shard/ShardCountStats.java index 5ea65c35691ea..fe02534d9bf7d 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ShardCountStats.java +++ b/server/src/main/java/org/elasticsearch/index/shard/ShardCountStats.java @@ -56,7 +56,7 @@ public void writeTo(StreamOutput out) throws IOException { } static final class Fields { - static final String SHARDS = "shards"; + static final String SHARDS = "shard_stats"; static final String TOTAL_COUNT = "total_count"; } diff --git a/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java b/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java index f3ae4ae2d85b4..7dd4ee0218582 100644 --- a/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java +++ b/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java @@ -69,8 +69,8 @@ public enum LuceneFilesExtensions { TVM("tvm", "Term Vector Metadata", true, false), TVX("tvx", "Term Vector Index", false, false), // kNN vectors format 
- VEC("vec", "Vector Data", false, false), - VEX("vex", "Vector Index", false, false), + VEC("vec", "Vector Data", false, true), + VEX("vex", "Vector Index", false, true), VEM("vem", "Vector Metadata", true, false); /** diff --git a/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java b/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java index a571391e7a1e4..f1e157a617c7b 100644 --- a/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java +++ b/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java @@ -28,7 +28,7 @@ public class StoreFileMetadata implements Writeable { public static final BytesRef UNAVAILABLE_WRITER_UUID = new BytesRef(); - private static final org.elasticsearch.Version WRITER_UUID_MIN_VERSION = org.elasticsearch.Version.V_8_0_0; + private static final org.elasticsearch.Version WRITER_UUID_MIN_VERSION = org.elasticsearch.Version.V_7_16_0; private final String name; diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index 0a41ccace73be..43de0e37030d8 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -743,11 +743,7 @@ public XContentBuilder snapshotsToXContent(final XContentBuilder builder, final } builder.endArray(); if (shouldWriteShardGens) { - builder.startArray(SHARD_GENERATIONS); - for (ShardGeneration gen : shardGenerations.getGens(indexId)) { - builder.value(gen); - } - builder.endArray(); + builder.xContentList(SHARD_GENERATIONS, shardGenerations.getGens(indexId)); } builder.endObject(); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/OrdinalValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/OrdinalValuesSource.java index ba469d346f26a..d7c4ecc8d6c2e 
100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/OrdinalValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/OrdinalValuesSource.java @@ -25,6 +25,12 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector; import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.function.LongConsumer; import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; @@ -50,6 +56,9 @@ class OrdinalValuesSource extends SingleDimensionValuesSource { private final LongConsumer breakerConsumer; // track how much bytes are stored in the values array private final CheckedFunction docValuesFunc; + // doc-values lookup, cached by LeafReaderContext ordinal + private final Map dvsLookup = new HashMap<>(); + private SortedSetDocValues lookup; // current ordinals lookup private int leafReaderOrd = -1; // current LeafReaderContext ordinal @@ -245,22 +254,32 @@ BytesRef toComparable(int slot) throws IOException { LeafBucketCollector getLeafCollector(LeafReaderContext context, LeafBucketCollector next) throws IOException { final boolean leafReaderContextChanged = context.ord != leafReaderOrd; assert leafReaderContextChanged == false || invariant(); // for performance reasons only check invariant upon change - final SortedSetDocValues dvs = docValuesFunc.apply(context); if (leafReaderContextChanged) { - remapOrdinals(lookup, dvs); + // use a separate instance for ordinal and term lookups, that is cached per segment + // to speed up sorted collections that call getLeafCollector once per term (see above) + final SortedSetDocValues newLookup = dvsLookup.computeIfAbsent(context.ord, k -> { + try { + return docValuesFunc.apply(context); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }); + remapOrdinals(lookup, 
newLookup); + lookup = newLookup; leafReaderOrd = context.ord; } - lookup = dvs; + + // and creates a SortedSetDocValues to iterate over the values + final SortedSetDocValues it = docValuesFunc.apply(context); assert leafReaderContextChanged == false || invariant(); // for performance reasons only check invariant upon change return new LeafBucketCollector() { @Override public void collect(int doc, long bucket) throws IOException { // caller of getLeafCollector ensures that collection happens before requesting a new leaf collector // this is important as ordinals only make sense in the context of the current lookup - assert dvs == lookup; - if (dvs.advanceExact(doc)) { + if (it.advanceExact(doc)) { long ord; - while ((ord = dvs.nextOrd()) != NO_MORE_ORDS) { + while ((ord = it.nextOrd()) != NO_MORE_ORDS) { currentValueOrd = ord; currentValueUnmapped = null; next.collect(doc, bucket); @@ -283,38 +302,51 @@ LeafBucketCollector getLeafCollector(Comparable value, LeafReaderConte throw new IllegalArgumentException("Expected BytesRef, got " + value.getClass()); } BytesRef term = (BytesRef) value; - final SortedSetDocValues dvs = docValuesFunc.apply(context); if (leafReaderContextChanged) { - remapOrdinals(lookup, dvs); - leafReaderOrd = context.ord; + // use a separate instance for ordinal and term lookups, that is cached per segment + // to speed up sorted collections that call getLeafCollector once per term + final SortedSetDocValues newLookup = dvsLookup.computeIfAbsent(context.ord, k -> { + try { + return docValuesFunc.apply(context); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }); + remapOrdinals(lookup, newLookup); + lookup = newLookup; } - lookup = dvs; + currentValueOrd = lookup.lookupTerm(term); + currentValueUnmapped = null; + leafReaderOrd = context.ord; + assert currentValueOrd >= 0; assert leafReaderContextChanged == false || invariant(); // for performance reasons only check invariant upon change - return new LeafBucketCollector() { 
- boolean currentValueIsSet = false; + return next; + } - @Override - public void collect(int doc, long bucket) throws IOException { - // caller of getLeafCollector ensures that collection happens before requesting a new leaf collector - // this is important as ordinals only make sense in the context of the current lookup - assert dvs == lookup; - if (currentValueIsSet == false) { - if (dvs.advanceExact(doc)) { - long ord; - while ((ord = dvs.nextOrd()) != NO_MORE_ORDS) { - if (term.equals(dvs.lookupOrd(ord))) { - currentValueIsSet = true; - currentValueOrd = ord; - currentValueUnmapped = null; - break; - } - } - } - } - assert currentValueIsSet; - next.collect(doc, bucket); + private static class Slot implements Comparable { + final int index; + final long ord; + final BytesRef unmapped; + + private Slot(int index, long ord, BytesRef unmapped) { + assert ord >= 0 || unmapped != null; + this.index = index; + this.ord = ord; + this.unmapped = unmapped; + } + + @Override + public int compareTo(Slot other) { + if (ord < 0 && ord == other.ord) { + assert unmapped != null && other.unmapped != null; + // compare by original term if both ordinals are insertion points (negative value) + return unmapped.compareTo(other.unmapped); } - }; + long norm1 = ord < 0 ? -ord - 1 : ord; + long norm2 = other.ord < 0 ? -other.ord - 1 : other.ord; + int cmp = Long.compare(norm1, norm2); + return cmp == 0 ? Long.compare(ord, other.ord) : cmp; + } } /** @@ -322,24 +354,49 @@ public void collect(int doc, long bucket) throws IOException { * in that case remember the term so that future remapping steps can accurately be done. 
*/ private void remapOrdinals(SortedSetDocValues oldMapping, SortedSetDocValues newMapping) throws IOException { + // speed up the lookups by sorting ordinals first + List sorted = new ArrayList<>(); for (int i = 0; i < numSlots; i++) { - final long oldOrd = valuesOrd.get(i); - if (oldOrd != Long.MIN_VALUE) { - final long newOrd; - if (oldOrd >= 0) { - final BytesRef newVal = oldMapping.lookupOrd(oldOrd); - newOrd = newMapping.lookupTerm(newVal); + long ord = valuesOrd.get(i); + if (ord != Long.MIN_VALUE) { + sorted.add(new Slot(i, ord, ord < 0 ? valuesUnmapped.get(i) : null)); + } + } + Collections.sort(sorted); + + long lastOldOrd = Long.MIN_VALUE; + long lastNewOrd = Long.MIN_VALUE; + BytesRef lastUnmapped = null; + for (Slot slot : sorted) { + final long index = slot.index; + final long oldOrd = slot.ord; + final BytesRef unmapped = slot.unmapped; + final long newOrd; + if (oldOrd >= 0) { + if (lastOldOrd == oldOrd) { + newOrd = lastNewOrd; if (newOrd < 0) { - setValueWithBreaking(i, BytesRef.deepCopyOf(newVal)); + setValueWithBreaking(index, lastUnmapped); } } else { - newOrd = newMapping.lookupTerm(valuesUnmapped.get(i)); - if (newOrd >= 0) { - setValueWithBreaking(i, null); + final BytesRef newVal = oldMapping.lookupOrd(oldOrd); + newOrd = newMapping.lookupTerm(newVal); + if (newOrd < 0) { + setValueWithBreaking(index, BytesRef.deepCopyOf(newVal)); } } - valuesOrd.set(i, newOrd); + } else { + // the original term is missing in the dictionary + assert unmapped != null; + newOrd = newMapping.lookupTerm(unmapped); + if (newOrd >= 0) { + setValueWithBreaking(index, null); + } } + lastOldOrd = oldOrd; + lastNewOrd = newOrd; + lastUnmapped = valuesUnmapped.get(index); + valuesOrd.set(index, newOrd); } if (currentValueOrd != null) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java 
index 7684e0d4484ba..2c1a4a0e95967 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java @@ -149,7 +149,7 @@ protected void validate(ValidationContext context) { @Override protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(SearchSourceBuilder.SORT_FIELD.getPreferredName(), sorts); + builder.xContentList(SearchSourceBuilder.SORT_FIELD.getPreferredName(), sorts); builder.field(FROM.getPreferredName(), from); if (size != null) { builder.field(SIZE.getPreferredName(), size); diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 360e6e41925aa..2d77f1a438f15 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -1425,7 +1425,7 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t } if (stats != null) { - builder.field(STATS_FIELD.getPreferredName(), stats); + builder.stringListField(STATS_FIELD.getPreferredName(), stats); } if (extBuilders != null && extBuilders.isEmpty() == false) { diff --git a/server/src/main/java/org/elasticsearch/transport/TransportInfo.java b/server/src/main/java/org/elasticsearch/transport/TransportInfo.java index e952a563d333d..4608f4c116a12 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportInfo.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportInfo.java @@ -102,7 +102,7 @@ private String formatPublishAddressString(String propertyName, TransportAddress @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { 
builder.startObject(Fields.TRANSPORT); - builder.array(Fields.BOUND_ADDRESS, (Object[]) address.boundAddresses()); + builder.xContentList(Fields.BOUND_ADDRESS, address.boundAddresses()); builder.field(Fields.PUBLISH_ADDRESS, formatPublishAddressString("transport.publish_address", address.publishAddress())); builder.startObject(Fields.PROFILES); if (profileAddresses != null && profileAddresses.size() > 0) { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java index af53179b8475c..41161d60486f2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java @@ -16,18 +16,20 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.DateMathExpressionResolver; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Locale; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.joda.time.DateTimeZone.UTC; public class DateMathExpressionResolverTests extends ESTestCase { @@ -37,6 +39,15 @@ public class DateMathExpressionResolverTests extends ESTestCase { SystemIndexAccessLevel.NONE ); + private static ZonedDateTime dateFromMillis(long millis) { + return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC); + } + + private static String 
formatDate(String pattern, ZonedDateTime zonedDateTime) { + DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern(pattern, Locale.ROOT); + return dateFormatter.format(zonedDateTime); + } + public void testNormal() throws Exception { int numIndexExpressions = randomIntBetween(1, 9); List indexExpressions = new ArrayList<>(numIndexExpressions); @@ -55,11 +66,11 @@ public void testExpression() throws Exception { List result = expressionResolver.resolve(context, indexExpressions); assertThat(result.size(), equalTo(3)); assertThat(result.get(0), - equalTo(".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(context.getStartTime(), UTC)))); + equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); assertThat(result.get(1), - equalTo(".watch_history-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(context.getStartTime(), UTC)))); + equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); assertThat(result.get(2), - equalTo("logstash-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(context.getStartTime(), UTC)))); + equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testEmpty() throws Exception { @@ -77,30 +88,30 @@ public void testExpression_MultiParts() throws Exception { List result = expressionResolver.resolve(context, Arrays.asList("<.text1-{now/d}-text2-{now/M}>")); assertThat(result.size(), equalTo(1)); assertThat(result.get(0), equalTo(".text1-" - + DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(context.getStartTime(), UTC)) + + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + "-text2-" - + DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(context.getStartTime(), UTC).withDayOfMonth(1)))); + + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()).withDayOfMonth(1)))); } public void testExpression_CustomFormat() throws Exception { List 
results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd}}>")); assertThat(results.size(), equalTo(1)); assertThat(results.get(0), - equalTo(".marvel-" + DateTimeFormat.forPattern("yyyy.MM.dd").print(new DateTime(context.getStartTime(), UTC)))); + equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testExpression_EscapeStatic() throws Exception { List result = expressionResolver.resolve(context, Arrays.asList("<.mar\\{v\\}el-{now/d}>")); assertThat(result.size(), equalTo(1)); assertThat(result.get(0), - equalTo(".mar{v}el-" + DateTimeFormat.forPattern("yyyy.MM.dd").print(new DateTime(context.getStartTime(), UTC)))); + equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testExpression_EscapeDateFormat() throws Exception { List result = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'yyyy}}>")); assertThat(result.size(), equalTo(1)); assertThat(result.get(0), - equalTo(".marvel-" + DateTimeFormat.forPattern("'{year}'yyyy").print(new DateTime(context.getStartTime(), UTC)))); + equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime())))); } public void testExpression_MixedArray() throws Exception { @@ -110,39 +121,42 @@ public void testExpression_MixedArray() throws Exception { assertThat(result.size(), equalTo(4)); assertThat(result.get(0), equalTo("name1")); assertThat(result.get(1), - equalTo(".marvel-" + DateTimeFormat.forPattern("yyyy.MM.dd").print(new DateTime(context.getStartTime(), UTC)))); + equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); assertThat(result.get(2), equalTo("name2")); assertThat(result.get(3), equalTo(".logstash-" + - DateTimeFormat.forPattern("yyyy.MM").print(new DateTime(context.getStartTime(), UTC).withDayOfMonth(1)))); + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1)))); } - + 
public void testExpression_CustomTimeZoneInIndexName() throws Exception { - DateTimeZone timeZone; + ZoneId timeZone; int hoursOffset; int minutesOffset = 0; if (randomBoolean()) { hoursOffset = randomIntBetween(-12, 14); - timeZone = DateTimeZone.forOffsetHours(hoursOffset); + timeZone = ZoneOffset.ofHours(hoursOffset); } else { hoursOffset = randomIntBetween(-11, 13); minutesOffset = randomIntBetween(0, 59); - timeZone = DateTimeZone.forOffsetHoursMinutes(hoursOffset, minutesOffset); + if (hoursOffset < 0) { + minutesOffset = -minutesOffset; + } + timeZone = ZoneOffset.ofHoursMinutes(hoursOffset, minutesOffset); } - DateTime now; + ZonedDateTime now; if (hoursOffset >= 0) { // rounding to next day 00:00 - now = DateTime.now(UTC).plusHours(hoursOffset).plusMinutes(minutesOffset) - .withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0); + now = ZonedDateTime.now(ZoneOffset.UTC).plusHours(hoursOffset).plusMinutes(minutesOffset) + .withHour(0).withMinute(0).withSecond(0); } else { // rounding to today 00:00 - now = DateTime.now(UTC).withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0); + now = ZonedDateTime.now(ZoneOffset.UTC).withHour(0).withMinute(0).withSecond(0); } - Context context = new Context(this.context.getState(), this.context.getOptions(), now.getMillis(), + Context context = new Context(this.context.getState(), this.context.getOptions(), now.toInstant().toEpochMilli(), SystemIndexAccessLevel.NONE, name -> false, name -> false); - List results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getID() + "}}>")); + List results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>")); assertThat(results.size(), equalTo(1)); logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0)); - assertThat(results.get(0), equalTo(".marvel-" + DateTimeFormat.forPattern("yyyy.MM.dd").print(now.withZone(timeZone)))); + 
assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); } public void testExpressionInvalidUnescaped() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/common/RoundingTests.java b/server/src/test/java/org/elasticsearch/common/RoundingTests.java deleted file mode 100644 index c5591b2cc2ebf..0000000000000 --- a/server/src/test/java/org/elasticsearch/common/RoundingTests.java +++ /dev/null @@ -1,1203 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.common; - -import org.elasticsearch.core.Tuple; -import org.elasticsearch.common.rounding.DateTimeUnit; -import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.time.DateFormatters; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Description; -import org.hamcrest.Matcher; -import org.hamcrest.TypeSafeMatcher; - -import java.time.Instant; -import java.time.ZoneId; -import java.time.ZoneOffset; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; -import java.time.temporal.TemporalAccessor; -import java.time.zone.ZoneOffsetTransition; -import java.time.zone.ZoneOffsetTransitionRule; -import java.time.zone.ZoneRules; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.concurrent.TimeUnit; - -import static java.util.stream.Collectors.toList; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static 
org.hamcrest.Matchers.lessThan; -import static org.hamcrest.Matchers.lessThanOrEqualTo; - -public class RoundingTests extends ESTestCase { - - public void testUTCTimeUnitRounding() { - Rounding tzRounding = Rounding.builder(Rounding.DateTimeUnit.MONTH_OF_YEAR).build(); - ZoneId tz = ZoneOffset.UTC; - assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-01T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-01T00:00:00.000Z")), isDate(time("2009-03-01T00:00:00.000Z"), tz)); - - tzRounding = Rounding.builder(Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR).build(); - assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-09T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2012-01-16T00:00:00.000Z"), tz)); - - tzRounding = Rounding.builder(Rounding.DateTimeUnit.QUARTER_OF_YEAR).build(); - assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-01T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2012-04-01T00:00:00.000Z"), tz)); - - tzRounding = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).build(); - assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-10T01:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2012-01-09T01:00:00.000Z"), tz)); - - tzRounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build(); - assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-10T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2012-01-10T00:00:00.000Z"), tz)); - - tzRounding = Rounding.builder(Rounding.DateTimeUnit.YEAR_OF_CENTURY).build(); - assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-01T00:00:00.000Z"), tz)); - 
assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2013-01-01T00:00:00.000Z"), tz)); - - tzRounding = Rounding.builder(Rounding.DateTimeUnit.MINUTES_OF_HOUR).build(); - assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-10T01:01:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2012-01-09T00:01:00.000Z"), tz)); - - tzRounding = Rounding.builder(Rounding.DateTimeUnit.SECOND_OF_MINUTE).build(); - assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-10T01:01:01.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2012-01-09T00:00:01.000Z"), tz)); - } - - public void testUTCIntervalRounding() { - Rounding tzRounding = Rounding.builder(TimeValue.timeValueHours(12)).build(); - ZoneId tz = ZoneOffset.UTC; - assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-03T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-03T00:00:00.000Z")), isDate(time("2009-02-03T12:00:00.000Z"), tz)); - assertThat(tzRounding.round(time("2009-02-03T13:01:01")), isDate(time("2009-02-03T12:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-03T12:00:00.000Z")), isDate(time("2009-02-04T00:00:00.000Z"), tz)); - - tzRounding = Rounding.builder(TimeValue.timeValueHours(48)).build(); - assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-03T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-03T00:00:00.000Z")), isDate(time("2009-02-05T00:00:00.000Z"), tz)); - assertThat(tzRounding.round(time("2009-02-05T13:01:01")), isDate(time("2009-02-05T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-05T00:00:00.000Z")), isDate(time("2009-02-07T00:00:00.000Z"), tz)); - } - - /** - * test TimeIntervalRounding, (interval < 12h) with time zone shift - */ - public 
void testTimeIntervalRounding() { - ZoneId tz = ZoneOffset.ofHours(-1); - Rounding tzRounding = Rounding.builder(TimeValue.timeValueHours(6)).timeZone(tz).build(); - assertThat(tzRounding.round(time("2009-02-03T00:01:01")), isDate(time("2009-02-02T19:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-02T19:00:00.000Z")), isDate(time("2009-02-03T01:00:00.000Z"), tz)); - - assertThat(tzRounding.round(time("2009-02-03T13:01:01")), isDate(time("2009-02-03T13:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-03T13:00:00.000Z")), isDate(time("2009-02-03T19:00:00.000Z"), tz)); - } - - /** - * test DayIntervalRounding, (interval >= 12h) with time zone shift - */ - public void testDayIntervalRounding() { - ZoneId tz = ZoneOffset.ofHours(-8); - Rounding tzRounding = Rounding.builder(TimeValue.timeValueHours(12)).timeZone(tz).build(); - assertThat(tzRounding.round(time("2009-02-03T00:01:01")), isDate(time("2009-02-02T20:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-02T20:00:00.000Z")), isDate(time("2009-02-03T08:00:00.000Z"), tz)); - - assertThat(tzRounding.round(time("2009-02-03T13:01:01")), isDate(time("2009-02-03T08:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-03T08:00:00.000Z")), isDate(time("2009-02-03T20:00:00.000Z"), tz)); - } - - public void testDayRounding() { - int timezoneOffset = -2; - Rounding tzRounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH) - .timeZone(ZoneOffset.ofHours(timezoneOffset)).build(); - assertThat(tzRounding.round(0), equalTo(0L - TimeValue.timeValueHours(24 + timezoneOffset).millis())); - assertThat(tzRounding.nextRoundingValue(0L - TimeValue.timeValueHours(24 + timezoneOffset).millis()), equalTo(TimeValue - .timeValueHours(-timezoneOffset).millis())); - - ZoneId tz = ZoneId.of("-08:00"); - tzRounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).timeZone(tz).build(); - 
assertThat(tzRounding.round(time("2012-04-01T04:15:30Z")), isDate(time("2012-03-31T08:00:00Z"), tz)); - - tzRounding = Rounding.builder(Rounding.DateTimeUnit.MONTH_OF_YEAR).timeZone(tz).build(); - assertThat(tzRounding.round(time("2012-04-01T04:15:30Z")), equalTo(time("2012-03-01T08:00:00Z"))); - - // date in Feb-3rd, but still in Feb-2nd in -02:00 timezone - tz = ZoneId.of("-02:00"); - tzRounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).timeZone(tz).build(); - assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-02T02:00:00"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-02T02:00:00")), isDate(time("2009-02-03T02:00:00"), tz)); - - // date in Feb-3rd, also in -02:00 timezone - tzRounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).timeZone(tz).build(); - assertThat(tzRounding.round(time("2009-02-03T02:01:01")), isDate(time("2009-02-03T02:00:00"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-03T02:00:00")), isDate(time("2009-02-04T02:00:00"), tz)); - } - - public void testTimeRounding() { - // hour unit - ZoneId tz = ZoneOffset.ofHours(-2); - Rounding tzRounding = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).timeZone(tz).build(); - assertThat(tzRounding.round(0), equalTo(0L)); - assertThat(tzRounding.nextRoundingValue(0L), equalTo(TimeValue.timeValueHours(1L).getMillis())); - - assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-03T01:00:00"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-03T01:00:00")), isDate(time("2009-02-03T02:00:00"), tz)); - } - - public void testTimeUnitRoundingDST() { - Rounding tzRounding; - // testing savings to non savings switch - ZoneId cet = ZoneId.of("CET"); - tzRounding = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).timeZone(cet).build(); - assertThat(tzRounding.round(time("2014-10-26T01:01:01", cet)), isDate(time("2014-10-26T01:00:00+02:00"), cet)); - 
assertThat(tzRounding.nextRoundingValue(time("2014-10-26T01:00:00", cet)),isDate(time("2014-10-26T02:00:00+02:00"), cet)); - assertThat(tzRounding.nextRoundingValue(time("2014-10-26T02:00:00", cet)), isDate(time("2014-10-26T02:00:00+01:00"), cet)); - - // testing non savings to savings switch - tzRounding = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).timeZone(cet).build(); - assertThat(tzRounding.round(time("2014-03-30T01:01:01", cet)), isDate(time("2014-03-30T01:00:00+01:00"), cet)); - assertThat(tzRounding.nextRoundingValue(time("2014-03-30T01:00:00", cet)), isDate(time("2014-03-30T03:00:00", cet), cet)); - assertThat(tzRounding.nextRoundingValue(time("2014-03-30T03:00:00", cet)), isDate(time("2014-03-30T04:00:00", cet), cet)); - - // testing non savings to savings switch (America/Chicago) - ZoneId chg = ZoneId.of("America/Chicago"); - Rounding tzRounding_utc = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY) - .timeZone(ZoneOffset.UTC).build(); - assertThat(tzRounding.round(time("2014-03-09T03:01:01", chg)), isDate(time("2014-03-09T03:00:00", chg), chg)); - - Rounding tzRounding_chg = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).timeZone(chg).build(); - assertThat(tzRounding_chg.round(time("2014-03-09T03:01:01", chg)), isDate(time("2014-03-09T03:00:00", chg), chg)); - - // testing savings to non savings switch 2013 (America/Chicago) - assertThat(tzRounding_utc.round(time("2013-11-03T06:01:01", chg)), isDate(time("2013-11-03T06:00:00", chg), chg)); - assertThat(tzRounding_chg.round(time("2013-11-03T06:01:01", chg)), isDate(time("2013-11-03T06:00:00", chg), chg)); - - // testing savings to non savings switch 2014 (America/Chicago) - assertThat(tzRounding_utc.round(time("2014-11-02T06:01:01", chg)), isDate(time("2014-11-02T06:00:00", chg), chg)); - assertThat(tzRounding_chg.round(time("2014-11-02T06:01:01", chg)), isDate(time("2014-11-02T06:00:00", chg), chg)); - } - - public void testOffsetRounding() { - long twoHours = 
TimeUnit.HOURS.toMillis(2); - long oneDay = TimeUnit.DAYS.toMillis(1); - Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).offset(twoHours).build(); - assertThat(rounding.round(0), equalTo(-oneDay + twoHours)); - assertThat(rounding.round(twoHours), equalTo(twoHours)); - assertThat(rounding.nextRoundingValue(-oneDay), equalTo(-oneDay + twoHours)); - assertThat(rounding.nextRoundingValue(0), equalTo(twoHours)); - assertThat(rounding.withoutOffset().round(0), equalTo(0L)); - assertThat(rounding.withoutOffset().nextRoundingValue(0), equalTo(oneDay)); - - rounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).offset(-twoHours).build(); - assertThat(rounding.round(0), equalTo(-twoHours)); - assertThat(rounding.round(oneDay - twoHours), equalTo(oneDay - twoHours)); - assertThat(rounding.nextRoundingValue(-oneDay), equalTo(-twoHours)); - assertThat(rounding.nextRoundingValue(0), equalTo(oneDay - twoHours)); - assertThat(rounding.withoutOffset().round(0), equalTo(0L)); - assertThat(rounding.withoutOffset().nextRoundingValue(0), equalTo(oneDay)); - - rounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).timeZone(ZoneId.of("America/New_York")).offset(-twoHours).build(); - assertThat(rounding.round(time("2020-11-01T09:00:00")), equalTo(time("2020-11-01T02:00:00"))); - } - - /** - * Randomized test on TimeUnitRounding. Test uses random - * {@link DateTimeUnit} and {@link ZoneId} and often (50% of the time) - * chooses test dates that are exactly on or close to offset changes (e.g. - * DST) in the chosen time zone. - * - * It rounds the test date down and up and performs various checks on the - * rounding unit interval that is defined by this. 
Assumptions tested are - * described in - * {@link #assertInterval(long, long, long, Rounding, ZoneId)} - */ - public void testRandomTimeUnitRounding() { - for (int i = 0; i < 1000; ++i) { - Rounding.DateTimeUnit unit = randomFrom(Rounding.DateTimeUnit.values()); - ZoneId tz = randomZone(); - long[] bounds = randomDateBounds(unit); - assertUnitRoundingSameAsJavaUtilTimeImplementation(unit, tz, bounds[0], bounds[1]); - } - } - - /** - * This test chooses a date in the middle of the transition, so that we can test - * if the transition which is before the minLookup, but still should be applied - * is not skipped - */ - public void testRoundingAroundDST() { - Rounding.DateTimeUnit unit = Rounding.DateTimeUnit.DAY_OF_MONTH; - ZoneId tz = ZoneId.of("Canada/Newfoundland"); - long minLookup = 688618001000L; // 1991-10-28T02:46:41.527Z - long maxLookup = 688618001001L; // +1sec - // there is a Transition[Overlap at 1991-10-27T00:01-02:30 to -03:30] ” - assertUnitRoundingSameAsJavaUtilTimeImplementation(unit, tz, minLookup, maxLookup); - } - - private void assertUnitRoundingSameAsJavaUtilTimeImplementation(Rounding.DateTimeUnit unit, ZoneId tz, long start, long end) { - Rounding rounding = new Rounding.TimeUnitRounding(unit, tz); - Rounding.Prepared prepared = rounding.prepare(start, end); - - // Check that rounding is internally consistent and consistent with nextRoundingValue - long date = dateBetween(start, end); - long unitMillis = unit.getField().getBaseUnit().getDuration().toMillis(); - // FIXME this was copy pasted from the other impl and not used. 
breaks the nasty date actually gets assigned - if (randomBoolean()) { - nastyDate(date, tz, unitMillis); - } - final long roundedDate = prepared.round(date); - final long nextRoundingValue = prepared.nextRoundingValue(roundedDate); - - assertInterval(roundedDate, date, nextRoundingValue, rounding, tz); - - // check correct unit interval width for units smaller than a day, they should be fixed size except for transitions - if (unitMillis <= 86400 * 1000) { - // if the interval defined didn't cross timezone offset transition, it should cover unitMillis width - int offsetRounded = tz.getRules().getOffset(Instant.ofEpochMilli(roundedDate - 1)).getTotalSeconds(); - int offsetNextValue = tz.getRules().getOffset(Instant.ofEpochMilli(nextRoundingValue + 1)).getTotalSeconds(); - if (offsetRounded == offsetNextValue) { - assertThat("unit interval width not as expected for [" + unit + "], [" + tz + "] at " - + Instant.ofEpochMilli(roundedDate), nextRoundingValue - roundedDate, equalTo(unitMillis)); - } - } - - // Round a whole bunch of dates and make sure they line up with the known good java time implementation - Rounding.Prepared javaTimeRounding = rounding.prepareJavaTime(); - for (int d = 0; d < 1000; d++) { - date = dateBetween(start, end); - long javaRounded = javaTimeRounding.round(date); - long esRounded = prepared.round(date); - if (javaRounded != esRounded) { - fail("Expected [" + rounding + "] to round [" + Instant.ofEpochMilli(date) + "] to [" - + Instant.ofEpochMilli(javaRounded) + "] but instead rounded to [" + Instant.ofEpochMilli(esRounded) + "]"); - } - long javaNextRoundingValue = javaTimeRounding.nextRoundingValue(date); - long esNextRoundingValue = prepared.nextRoundingValue(date); - if (javaNextRoundingValue != esNextRoundingValue) { - fail("Expected [" + rounding + "] to round [" + Instant.ofEpochMilli(date) + "] to [" - + Instant.ofEpochMilli(esRounded) + "] and nextRoundingValue to be [" - + Instant.ofEpochMilli(javaNextRoundingValue) + "] but instead 
was to [" - + Instant.ofEpochMilli(esNextRoundingValue) + "]"); - } - } - } - - /** - * To be even more nasty, go to a transition in the selected time zone. - * In one third of the cases stay there, otherwise go half a unit back or forth - */ - private static long nastyDate(long initialDate, ZoneId timezone, long unitMillis) { - ZoneOffsetTransition transition = timezone.getRules().nextTransition(Instant.ofEpochMilli(initialDate)); - long date = initialDate; - if (transition != null) { - date = transition.getInstant().toEpochMilli(); - } - if (randomBoolean()) { - return date + (randomLong() % unitMillis); // positive and negative offset possible - } else { - return date; - } - } - - /** - * test DST end with interval rounding - * CET: 25 October 2015, 03:00:00 clocks were turned backward 1 hour to 25 October 2015, 02:00:00 local standard time - */ - public void testTimeIntervalCET_DST_End() { - long interval = TimeUnit.MINUTES.toMillis(20); - ZoneId tz = ZoneId.of("CET"); - Rounding rounding = new Rounding.TimeIntervalRounding(interval, tz); - - assertThat(rounding.round(time("2015-10-25T01:55:00+02:00")), isDate(time("2015-10-25T01:40:00+02:00"), tz)); - assertThat(rounding.round(time("2015-10-25T02:15:00+02:00")), isDate(time("2015-10-25T02:00:00+02:00"), tz)); - assertThat(rounding.round(time("2015-10-25T02:35:00+02:00")), isDate(time("2015-10-25T02:20:00+02:00"), tz)); - assertThat(rounding.round(time("2015-10-25T02:55:00+02:00")), isDate(time("2015-10-25T02:40:00+02:00"), tz)); - // after DST shift - assertThat(rounding.round(time("2015-10-25T02:15:00+01:00")), isDate(time("2015-10-25T02:00:00+01:00"), tz)); - assertThat(rounding.round(time("2015-10-25T02:35:00+01:00")), isDate(time("2015-10-25T02:20:00+01:00"), tz)); - assertThat(rounding.round(time("2015-10-25T02:55:00+01:00")), isDate(time("2015-10-25T02:40:00+01:00"), tz)); - assertThat(rounding.round(time("2015-10-25T03:15:00+01:00")), isDate(time("2015-10-25T03:00:00+01:00"), tz)); - } - - /** - * test 
DST start with interval rounding - * CET: 27 March 2016, 02:00:00 clocks were turned forward 1 hour to 27 March 2016, 03:00:00 local daylight time - */ - public void testTimeIntervalCET_DST_Start() { - long interval = TimeUnit.MINUTES.toMillis(20); - ZoneId tz = ZoneId.of("CET"); - Rounding rounding = new Rounding.TimeIntervalRounding(interval, tz); - // test DST start - assertThat(rounding.round(time("2016-03-27T01:55:00+01:00")), isDate(time("2016-03-27T01:40:00+01:00"), tz)); - assertThat(rounding.round(time("2016-03-27T02:00:00+01:00")), isDate(time("2016-03-27T03:00:00+02:00"), tz)); - assertThat(rounding.round(time("2016-03-27T03:15:00+02:00")), isDate(time("2016-03-27T03:00:00+02:00"), tz)); - assertThat(rounding.round(time("2016-03-27T03:35:00+02:00")), isDate(time("2016-03-27T03:20:00+02:00"), tz)); - } - - /** - * test DST start with offset not fitting interval, e.g. Asia/Kathmandu - * adding 15min on 1986-01-01T00:00:00 the interval from - * 1986-01-01T00:15:00+05:45 to 1986-01-01T00:20:00+05:45 to only be 5min - * long - */ - public void testTimeInterval_Kathmandu_DST_Start() { - long interval = TimeUnit.MINUTES.toMillis(20); - ZoneId tz = ZoneId.of("Asia/Kathmandu"); - Rounding rounding = new Rounding.TimeIntervalRounding(interval, tz); - assertThat(rounding.round(time("1985-12-31T23:55:00+05:30")), isDate(time("1985-12-31T23:40:00+05:30"), tz)); - assertThat(rounding.round(time("1986-01-01T00:16:00+05:45")), isDate(time("1986-01-01T00:15:00+05:45"), tz)); - assertThat(time("1986-01-01T00:15:00+05:45") - time("1985-12-31T23:40:00+05:30"), equalTo(TimeUnit.MINUTES.toMillis(20))); - assertThat(rounding.round(time("1986-01-01T00:26:00+05:45")), isDate(time("1986-01-01T00:20:00+05:45"), tz)); - assertThat(time("1986-01-01T00:20:00+05:45") - time("1986-01-01T00:15:00+05:45"), equalTo(TimeUnit.MINUTES.toMillis(5))); - assertThat(rounding.round(time("1986-01-01T00:46:00+05:45")), isDate(time("1986-01-01T00:40:00+05:45"), tz)); - 
assertThat(time("1986-01-01T00:40:00+05:45") - time("1986-01-01T00:20:00+05:45"), equalTo(TimeUnit.MINUTES.toMillis(20))); - } - - /** - * Special test for intervals that don't fit evenly into rounding interval. - * In this case, when interval crosses DST transition point, rounding in local - * time can land in a DST gap which results in wrong UTC rounding values. - */ - public void testIntervalRounding_NotDivisibleInteval() { - long interval = TimeUnit.MINUTES.toMillis(14); - ZoneId tz = ZoneId.of("CET"); - Rounding rounding = new Rounding.TimeIntervalRounding(interval, tz); - - assertThat(rounding.round(time("2016-03-27T01:41:00+01:00")), isDate(time("2016-03-27T01:30:00+01:00"), tz)); - assertThat(rounding.round(time("2016-03-27T01:51:00+01:00")), isDate(time("2016-03-27T01:44:00+01:00"), tz)); - assertThat(rounding.round(time("2016-03-27T01:59:00+01:00")), isDate(time("2016-03-27T01:58:00+01:00"), tz)); - assertThat(rounding.round(time("2016-03-27T03:05:00+02:00")), isDate(time("2016-03-27T03:00:00+02:00"), tz)); - assertThat(rounding.round(time("2016-03-27T03:12:00+02:00")), isDate(time("2016-03-27T03:08:00+02:00"), tz)); - assertThat(rounding.round(time("2016-03-27T03:25:00+02:00")), isDate(time("2016-03-27T03:22:00+02:00"), tz)); - assertThat(rounding.round(time("2016-03-27T03:39:00+02:00")), isDate(time("2016-03-27T03:36:00+02:00"), tz)); - } - - /** - * Test for half day rounding intervals scrossing DST. 
- */ - public void testIntervalRounding_HalfDay_DST() { - long interval = TimeUnit.HOURS.toMillis(12); - ZoneId tz = ZoneId.of("CET"); - Rounding rounding = new Rounding.TimeIntervalRounding(interval, tz); - - assertThat(rounding.round(time("2016-03-26T01:00:00+01:00")), isDate(time("2016-03-26T00:00:00+01:00"), tz)); - assertThat(rounding.round(time("2016-03-26T13:00:00+01:00")), isDate(time("2016-03-26T12:00:00+01:00"), tz)); - assertThat(rounding.round(time("2016-03-27T01:00:00+01:00")), isDate(time("2016-03-27T00:00:00+01:00"), tz)); - assertThat(rounding.round(time("2016-03-27T13:00:00+02:00")), isDate(time("2016-03-27T12:00:00+02:00"), tz)); - assertThat(rounding.round(time("2016-03-28T01:00:00+02:00")), isDate(time("2016-03-28T00:00:00+02:00"), tz)); - assertThat(rounding.round(time("2016-03-28T13:00:00+02:00")), isDate(time("2016-03-28T12:00:00+02:00"), tz)); - } - - public void testRandomTimeIntervalRounding() { - for (int i = 0; i < 1000; i++) { - int unitCount = randomIntBetween(1, 365); - TimeUnit unit = randomFrom(TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS); - long interval = unit.toMillis(unitCount); - ZoneId tz = randomZone(); - Rounding rounding = new Rounding.TimeIntervalRounding(interval, tz); - long mainDate = randomDate(); - if (randomBoolean()) { - mainDate = nastyDate(mainDate, tz, interval); - } - long min = mainDate - 2 * interval; - long max = mainDate + 2 * interval; - - /* - * Prepare a rounding with two extra intervals of range because - * in the tests far below we call round(round(min) - 1). The first - * round might spit out a time below min - interval if min is near - * a daylight savings time transition. So we request an extra big - * range just in case. 
- */ - Rounding.Prepared prepared = rounding.prepare(min - 2 * interval, max); - - // Round a whole bunch of dates and make sure they line up with the known good java time implementation - Rounding.Prepared javaTimeRounding = rounding.prepareJavaTime(); - for (int d = 0; d < 1000; d++) { - long date = dateBetween(min, max); - long javaRounded = javaTimeRounding.round(date); - long esRounded = prepared.round(date); - if (javaRounded != esRounded) { - fail("Expected [" + unitCount + " " + unit + " in " + tz + "] to round [" + Instant.ofEpochMilli(date) + "] to [" - + Instant.ofEpochMilli(javaRounded) + "] but instead rounded to [" + Instant.ofEpochMilli(esRounded) + "]"); - } - long javaNextRoundingValue = javaTimeRounding.nextRoundingValue(date); - long esNextRoundingValue = prepared.nextRoundingValue(date); - if (javaNextRoundingValue != esNextRoundingValue) { - fail("Expected [" + unitCount + " " + unit + " in " + tz + "] to round [" + Instant.ofEpochMilli(date) + "] to [" - + Instant.ofEpochMilli(esRounded) + "] and nextRoundingValue to be [" - + Instant.ofEpochMilli(javaNextRoundingValue) + "] but instead was to [" - + Instant.ofEpochMilli(esNextRoundingValue) + "]"); - } - } - - // check two intervals around date - long previousRoundedValue = Long.MIN_VALUE; - for (long date = min; date < max; date += interval / 2) { - try { - final long roundedDate = rounding.round(date); - final long nextRoundingValue = prepared.nextRoundingValue(roundedDate); - assertThat("Rounding should be idempotent", roundedDate, equalTo(prepared.round(roundedDate))); - assertThat("Rounded value smaller or equal than unrounded", roundedDate, lessThanOrEqualTo(date)); - assertThat("Values smaller than rounded value should round further down", prepared.round(roundedDate - 1), - lessThan(roundedDate)); - assertThat("Rounding should be >= previous rounding value", roundedDate, greaterThanOrEqualTo(previousRoundedValue)); - assertThat("NextRounding value should be greater than date", 
nextRoundingValue, greaterThan(roundedDate)); - assertThat("NextRounding value rounds to itself", nextRoundingValue, - isDate(rounding.round(nextRoundingValue), tz)); - - if (tz.getRules().isFixedOffset()) { - assertThat("NextRounding value should be interval from rounded value", nextRoundingValue - roundedDate, - equalTo(interval)); - } - previousRoundedValue = roundedDate; - } catch (AssertionError e) { - ZonedDateTime dateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(date), tz); - ZonedDateTime previousRoundedValueDate = ZonedDateTime.ofInstant(Instant.ofEpochMilli(previousRoundedValue), tz); - logger.error("Rounding error at {}/{}, timezone {}, interval: {} previousRoundedValue {}/{}", dateTime, date, - tz, interval, previousRoundedValueDate, previousRoundedValue); - throw e; - } - } - } - } - - /** - * Check a {@link Rounding.Prepared#nextRoundingValue} that was difficult - * to build well with the java.time APIs. - */ - public void testHardNextRoundingValue() { - Rounding rounding = new Rounding.TimeIntervalRounding(960000, ZoneId.of("Europe/Minsk")); - long rounded = rounding.prepareForUnknown().round(877824908400L); - long next = rounding.prepareForUnknown().nextRoundingValue(rounded); - assertThat(next, greaterThan(rounded)); - } - - /** - * Check a {@link Rounding.Prepared#nextRoundingValue} that was difficult - * to build well with the java.time APIs. - */ - public void testOtherHardNextRoundingValue() { - Rounding rounding = new Rounding.TimeIntervalRounding(480000, ZoneId.of("Portugal")); - long rounded = rounding.prepareJavaTime().round(972780720000L); - long next = rounding.prepareJavaTime().nextRoundingValue(rounded); - assertThat(next, greaterThan(rounded)); - } - - /** - * Check a {@link Rounding.Prepared#nextRoundingValue} that was difficult - * to build well our janky Newton's Method/binary search hybrid. 
- */ - public void testHardNewtonsMethod() { - ZoneId tz = ZoneId.of("Asia/Jerusalem"); - Rounding rounding = new Rounding.TimeIntervalRounding(19800000, tz); - assertThat(rounding.prepareJavaTime().nextRoundingValue(1824929914182L), isDate(time("2027-10-31T01:30:00", tz), tz)); - } - - /** - * Check a {@link Rounding.Prepared#nextRoundingValue} that was difficult - * to build well with the java.time APIs. - */ - public void testOtherHardNewtonsMethod() { - ZoneId tz = ZoneId.of("America/Glace_Bay"); - Rounding rounding = new Rounding.TimeIntervalRounding(13800000, tz); - assertThat(rounding.prepareJavaTime().nextRoundingValue(1383463147373L), isDate(time("2013-11-03T03:40:00", tz), tz)); - } - - /** - * Test that rounded values are always greater or equal to last rounded value if date is increasing. - * The example covers an interval around 2011-10-30T02:10:00+01:00, time zone CET, interval: 2700000ms - */ - public void testIntervalRoundingMonotonic_CET() { - long interval = TimeUnit.MINUTES.toMillis(45); - ZoneId tz = ZoneId.of("CET"); - Rounding rounding = new Rounding.TimeIntervalRounding(interval, tz); - List> expectedDates = new ArrayList<>(); - // first date is the date to be rounded, second the expected result - expectedDates.add(new Tuple<>("2011-10-30T01:40:00.000+02:00", "2011-10-30T01:30:00.000+02:00")); - expectedDates.add(new Tuple<>("2011-10-30T02:02:30.000+02:00", "2011-10-30T01:30:00.000+02:00")); - expectedDates.add(new Tuple<>("2011-10-30T02:25:00.000+02:00", "2011-10-30T02:15:00.000+02:00")); - expectedDates.add(new Tuple<>("2011-10-30T02:47:30.000+02:00", "2011-10-30T02:15:00.000+02:00")); - expectedDates.add(new Tuple<>("2011-10-30T02:10:00.000+01:00", "2011-10-30T02:15:00.000+02:00")); - expectedDates.add(new Tuple<>("2011-10-30T02:32:30.000+01:00", "2011-10-30T02:15:00.000+01:00")); - expectedDates.add(new Tuple<>("2011-10-30T02:55:00.000+01:00", "2011-10-30T02:15:00.000+01:00")); - expectedDates.add(new 
Tuple<>("2011-10-30T03:17:30.000+01:00", "2011-10-30T03:00:00.000+01:00")); - - long previousDate = Long.MIN_VALUE; - for (Tuple dates : expectedDates) { - final long roundedDate = rounding.round(time(dates.v1())); - assertThat(dates.toString(), roundedDate, isDate(time(dates.v2()), tz)); - assertThat(dates.toString(), roundedDate, greaterThanOrEqualTo(previousDate)); - previousDate = roundedDate; - } - // here's what this means for interval widths - assertEquals(TimeUnit.MINUTES.toMillis(45), time("2011-10-30T02:15:00.000+02:00") - time("2011-10-30T01:30:00.000+02:00")); - assertEquals(TimeUnit.MINUTES.toMillis(60), time("2011-10-30T02:15:00.000+01:00") - time("2011-10-30T02:15:00.000+02:00")); - assertEquals(TimeUnit.MINUTES.toMillis(45), time("2011-10-30T03:00:00.000+01:00") - time("2011-10-30T02:15:00.000+01:00")); - } - - /** - * special test for DST switch from #9491 - */ - public void testAmbiguousHoursAfterDSTSwitch() { - Rounding tzRounding; - final ZoneId tz = ZoneId.of("Asia/Jerusalem"); - tzRounding = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).timeZone(tz).build(); - assertThat(tzRounding.round(time("2014-10-26T00:30:00+03:00")), isDate(time("2014-10-26T00:00:00+03:00"), tz)); - assertThat(tzRounding.round(time("2014-10-26T01:30:00+03:00")), isDate(time("2014-10-26T01:00:00+03:00"), tz)); - // the utc date for "2014-10-25T03:00:00+03:00" and "2014-10-25T03:00:00+02:00" is the same, local time turns back 1h here - assertThat(time("2014-10-26T03:00:00+03:00"), isDate(time("2014-10-26T02:00:00+02:00"), tz)); - assertThat(tzRounding.round(time("2014-10-26T01:30:00+02:00")), isDate(time("2014-10-26T01:00:00+02:00"), tz)); - assertThat(tzRounding.round(time("2014-10-26T02:30:00+02:00")), isDate(time("2014-10-26T02:00:00+02:00"), tz)); - - // Day interval - tzRounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).timeZone(tz).build(); - assertThat(tzRounding.round(time("2014-11-11T17:00:00", tz)), isDate(time("2014-11-11T00:00:00", tz), tz)); 
- // DST on - assertThat(tzRounding.round(time("2014-08-11T17:00:00", tz)), isDate(time("2014-08-11T00:00:00", tz), tz)); - // Day of switching DST on -> off - assertThat(tzRounding.round(time("2014-10-26T17:00:00", tz)), isDate(time("2014-10-26T00:00:00", tz), tz)); - // Day of switching DST off -> on - assertThat(tzRounding.round(time("2015-03-27T17:00:00", tz)), isDate(time("2015-03-27T00:00:00", tz), tz)); - - // Month interval - tzRounding = Rounding.builder(Rounding.DateTimeUnit.MONTH_OF_YEAR).timeZone(tz).build(); - assertThat(tzRounding.round(time("2014-11-11T17:00:00", tz)), isDate(time("2014-11-01T00:00:00", tz), tz)); - // DST on - assertThat(tzRounding.round(time("2014-10-10T17:00:00", tz)), isDate(time("2014-10-01T00:00:00", tz), tz)); - - // Year interval - tzRounding = Rounding.builder(Rounding.DateTimeUnit.YEAR_OF_CENTURY).timeZone(tz).build(); - assertThat(tzRounding.round(time("2014-11-11T17:00:00", tz)), isDate(time("2014-01-01T00:00:00", tz), tz)); - - // Two timestamps in same year and different timezone offset ("Double buckets" issue - #9491) - tzRounding = Rounding.builder(Rounding.DateTimeUnit.YEAR_OF_CENTURY).timeZone(tz).build(); - assertThat(tzRounding.round(time("2014-11-11T17:00:00", tz)), - isDate(tzRounding.round(time("2014-08-11T17:00:00", tz)), tz)); - } - - /** - * test for #10025, strict local to UTC conversion can cause joda exceptions - * on DST start - */ - public void testLenientConversionDST() { - ZoneId tz = ZoneId.of("America/Sao_Paulo"); - - long start = time("2014-10-18T20:50:00.000", tz); - long end = time("2014-10-19T01:00:00.000", tz); - Rounding tzRounding = new Rounding.TimeUnitRounding(Rounding.DateTimeUnit.MINUTES_OF_HOUR, tz); - Rounding dayTzRounding = new Rounding.TimeIntervalRounding(60000, tz); - for (long time = start; time < end; time = time + 60000) { - assertThat(tzRounding.nextRoundingValue(time), greaterThan(time)); - assertThat(dayTzRounding.nextRoundingValue(time), greaterThan(time)); - } - } - - 
public void testEdgeCasesTransition() { - { - // standard +/-1 hour DST transition, CET - ZoneId tz = ZoneId.of("CET"); - Rounding rounding = new Rounding.TimeUnitRounding(Rounding.DateTimeUnit.HOUR_OF_DAY, tz); - - // 29 Mar 2015 - Daylight Saving Time Started - // at 02:00:00 clocks were turned forward 1 hour to 03:00:00 - assertInterval(time("2015-03-29T00:00:00.000+01:00"), time("2015-03-29T01:00:00.000+01:00"), rounding, 60, tz); - assertInterval(time("2015-03-29T01:00:00.000+01:00"), time("2015-03-29T03:00:00.000+02:00"), rounding, 60, tz); - assertInterval(time("2015-03-29T03:00:00.000+02:00"), time("2015-03-29T04:00:00.000+02:00"), rounding, 60, tz); - - // 25 Oct 2015 - Daylight Saving Time Ended - // at 03:00:00 clocks were turned backward 1 hour to 02:00:00 - assertInterval(time("2015-10-25T01:00:00.000+02:00"), time("2015-10-25T02:00:00.000+02:00"), rounding, 60, tz); - assertInterval(time("2015-10-25T02:00:00.000+02:00"), time("2015-10-25T02:00:00.000+01:00"), rounding, 60, tz); - assertInterval(time("2015-10-25T02:00:00.000+01:00"), time("2015-10-25T03:00:00.000+01:00"), rounding, 60, tz); - } - - { - // time zone "Asia/Kathmandu" - // 1 Jan 1986 - Time Zone Change (IST → NPT), at 00:00:00 clocks were turned forward 00:15 minutes - // - // hour rounding is stable before 1985-12-31T23:00:00.000 and after 1986-01-01T01:00:00.000+05:45 - // the interval between is 105 minutes long because the hour after transition starts at 00:15 - // which is not a round value for hourly rounding - ZoneId tz = ZoneId.of("Asia/Kathmandu"); - Rounding rounding = new Rounding.TimeUnitRounding(Rounding.DateTimeUnit.HOUR_OF_DAY, tz); - - assertInterval(time("1985-12-31T22:00:00.000+05:30"), time("1985-12-31T23:00:00.000+05:30"), rounding, 60, tz); - assertInterval(time("1985-12-31T23:00:00.000+05:30"), time("1986-01-01T01:00:00.000+05:45"), rounding, 105, tz); - assertInterval(time("1986-01-01T01:00:00.000+05:45"), time("1986-01-01T02:00:00.000+05:45"), rounding, 60, tz); - 
} - - { - // time zone "Australia/Lord_Howe" - // 3 Mar 1991 - Daylight Saving Time Ended - // at 02:00:00 clocks were turned backward 0:30 hours to Sunday, 3 March 1991, 01:30:00 - ZoneId tz = ZoneId.of("Australia/Lord_Howe"); - Rounding rounding = new Rounding.TimeUnitRounding(Rounding.DateTimeUnit.HOUR_OF_DAY, tz); - - assertInterval(time("1991-03-03T00:00:00.000+11:00"), time("1991-03-03T01:00:00.000+11:00"), rounding, 60, tz); - assertInterval(time("1991-03-03T01:00:00.000+11:00"), time("1991-03-03T02:00:00.000+10:30"), rounding, 90, tz); - assertInterval(time("1991-03-03T02:00:00.000+10:30"), time("1991-03-03T03:00:00.000+10:30"), rounding, 60, tz); - - // 27 Oct 1991 - Daylight Saving Time Started - // at 02:00:00 clocks were turned forward 0:30 hours to 02:30:00 - assertInterval(time("1991-10-27T00:00:00.000+10:30"), time("1991-10-27T01:00:00.000+10:30"), rounding, 60, tz); - // the interval containing the switch time is 90 minutes long - assertInterval(time("1991-10-27T01:00:00.000+10:30"), time("1991-10-27T03:00:00.000+11:00"), rounding, 90, tz); - assertInterval(time("1991-10-27T03:00:00.000+11:00"), time("1991-10-27T04:00:00.000+11:00"), rounding, 60, tz); - } - - { - // time zone "Pacific/Chatham" - // 5 Apr 2015 - Daylight Saving Time Ended - // at 03:45:00 clocks were turned backward 1 hour to 02:45:00 - ZoneId tz = ZoneId.of("Pacific/Chatham"); - Rounding rounding = new Rounding.TimeUnitRounding(Rounding.DateTimeUnit.HOUR_OF_DAY, tz); - - assertInterval(time("2015-04-05T02:00:00.000+13:45"), time("2015-04-05T03:00:00.000+13:45"), rounding, 60, tz); - assertInterval(time("2015-04-05T03:00:00.000+13:45"), time("2015-04-05T03:00:00.000+12:45"), rounding, 60, tz); - assertInterval(time("2015-04-05T03:00:00.000+12:45"), time("2015-04-05T04:00:00.000+12:45"), rounding, 60, tz); - - // 27 Sep 2015 - Daylight Saving Time Started - // at 02:45:00 clocks were turned forward 1 hour to 03:45:00 - - assertInterval(time("2015-09-27T01:00:00.000+12:45"), 
time("2015-09-27T02:00:00.000+12:45"), rounding, 60, tz); - assertInterval(time("2015-09-27T02:00:00.000+12:45"), time("2015-09-27T04:00:00.000+13:45"), rounding, 60, tz); - assertInterval(time("2015-09-27T04:00:00.000+13:45"), time("2015-09-27T05:00:00.000+13:45"), rounding, 60, tz); - } - } - - public void testDST_Europe_Rome() { - // time zone "Europe/Rome", rounding to days. Rome had two midnights on the day the clocks went back in 1978, and - // timeZone.convertLocalToUTC() gives the later of the two because Rome is east of UTC, whereas we want the earlier. - - ZoneId tz = ZoneId.of("Europe/Rome"); - Rounding rounding = new Rounding.TimeUnitRounding(Rounding.DateTimeUnit.DAY_OF_MONTH, tz); - - { - long timeBeforeFirstMidnight = time("1978-09-30T23:59:00+02:00"); - long floor = rounding.round(timeBeforeFirstMidnight); - assertThat(floor, isDate(time("1978-09-30T00:00:00+02:00"), tz)); - } - - { - long timeBetweenMidnights = time("1978-10-01T00:30:00+02:00"); - long floor = rounding.round(timeBetweenMidnights); - assertThat(floor, isDate(time("1978-10-01T00:00:00+02:00"), tz)); - } - - { - long timeAfterSecondMidnight = time("1978-10-01T00:30:00+01:00"); - long floor = rounding.round(timeAfterSecondMidnight); - assertThat(floor, isDate(time("1978-10-01T00:00:00+02:00"), tz)); - - long prevFloor = rounding.round(floor - 1); - assertThat(prevFloor, lessThan(floor)); - assertThat(prevFloor, isDate(time("1978-09-30T00:00:00+02:00"), tz)); - } - } - - /** - * Test for a time zone whose days overlap because the clocks are set back across midnight at the end of DST. - */ - public void testDST_America_St_Johns() { - // time zone "America/St_Johns", rounding to days. - ZoneId tz = ZoneId.of("America/St_Johns"); - Rounding rounding = new Rounding.TimeUnitRounding(Rounding.DateTimeUnit.DAY_OF_MONTH, tz); - - // 29 October 2006 - Daylight Saving Time ended, changing the UTC offset from -02:30 to -03:30. 
- // This happened at 02:31 UTC, 00:01 local time, so the clocks were set back 1 hour to 23:01 on the 28th. - // This means that 2006-10-29 has _two_ midnights, one in the -02:30 offset and one in the -03:30 offset. - // Only the first of these is considered "rounded". Moreover, the extra time between 23:01 and 23:59 - // should be considered as part of the 28th even though it comes after midnight on the 29th. - - { - // Times before the first midnight should be rounded up to the first midnight. - long timeBeforeFirstMidnight = time("2006-10-28T23:30:00.000-02:30"); - long floor = rounding.round(timeBeforeFirstMidnight); - assertThat(floor, isDate(time("2006-10-28T00:00:00.000-02:30"), tz)); - long ceiling = rounding.nextRoundingValue(timeBeforeFirstMidnight); - assertThat(ceiling, isDate(time("2006-10-29T00:00:00.000-02:30"), tz)); - assertInterval(floor, timeBeforeFirstMidnight, ceiling, rounding, tz); - } - - { - // Times between the two midnights which are on the later day should be rounded down to the later day's midnight. - long timeBetweenMidnights = time("2006-10-29T00:00:30.000-02:30"); - // (this is halfway through the last minute before the clocks changed, in which local time was ambiguous) - - long floor = rounding.round(timeBetweenMidnights); - assertThat(floor, isDate(time("2006-10-29T00:00:00.000-02:30"), tz)); - - long ceiling = rounding.nextRoundingValue(timeBetweenMidnights); - assertThat(ceiling, isDate(time("2006-10-30T00:00:00.000-03:30"), tz)); - - assertInterval(floor, timeBetweenMidnights, ceiling, rounding, tz); - } - - { - // Times between the two midnights which are on the earlier day should be rounded down to the earlier day's midnight. 
- long timeBetweenMidnights = time("2006-10-28T23:30:00.000-03:30"); - // (this is halfway through the hour after the clocks changed, in which local time was ambiguous) - - long floor = rounding.round(timeBetweenMidnights); - assertThat(floor, isDate(time("2006-10-28T00:00:00.000-02:30"), tz)); - - long ceiling = rounding.nextRoundingValue(timeBetweenMidnights); - assertThat(ceiling, isDate(time("2006-10-29T00:00:00.000-02:30"), tz)); - - assertInterval(floor, timeBetweenMidnights, ceiling, rounding, tz); - } - - { - // Times after the second midnight should be rounded down to the first midnight. - long timeAfterSecondMidnight = time("2006-10-29T06:00:00.000-03:30"); - long floor = rounding.round(timeAfterSecondMidnight); - assertThat(floor, isDate(time("2006-10-29T00:00:00.000-02:30"), tz)); - long ceiling = rounding.nextRoundingValue(timeAfterSecondMidnight); - assertThat(ceiling, isDate(time("2006-10-30T00:00:00.000-03:30"), tz)); - assertInterval(floor, timeAfterSecondMidnight, ceiling, rounding, tz); - } - } - - /** - * Tests for DST transitions that cause the rounding to jump "backwards" because they round - * from one back to the previous day. 
Usually these rounding start before - */ - public void testForwardsBackwardsTimeZones() { - for (String zoneId : JAVA_ZONE_IDS) { - ZoneId tz = ZoneId.of(zoneId); - ZoneRules rules = tz.getRules(); - for (ZoneOffsetTransition transition : rules.getTransitions()) { - checkForForwardsBackwardsTransition(tz, transition); - } - int firstYear; - if (rules.getTransitions().isEmpty()) { - // Pick an arbitrary year to start the range - firstYear = 1999; - } else { - ZoneOffsetTransition lastTransition = rules.getTransitions().get(rules.getTransitions().size() - 1); - firstYear = lastTransition.getDateTimeAfter().getYear() + 1; - } - // Pick an arbitrary year to end the range too - int lastYear = 2050; - int year = randomFrom(firstYear, lastYear); - for (ZoneOffsetTransitionRule transitionRule : rules.getTransitionRules()) { - ZoneOffsetTransition transition = transitionRule.createTransition(year); - checkForForwardsBackwardsTransition(tz, transition); - } - } - } - - private void checkForForwardsBackwardsTransition(ZoneId tz, ZoneOffsetTransition transition) { - if (transition.getDateTimeBefore().getYear() < 1950) { - // We don't support transitions far in the past at all - return; - } - if (false == transition.isOverlap()) { - // Only overlaps cause the array rounding to have trouble - return; - } - if (transition.getDateTimeBefore().getDayOfMonth() == transition.getDateTimeAfter().getDayOfMonth()) { - // Only when the rounding changes the day - return; - } - if (transition.getDateTimeBefore().getMinute() == 0) { - // But roundings that change *at* midnight are safe because they don't "jump" to the next day. 
- return; - } - logger.info( - "{} from {}{} to {}{}", - tz, - transition.getDateTimeBefore(), - transition.getOffsetBefore(), - transition.getDateTimeAfter(), - transition.getOffsetAfter() - ); - long millisSinceEpoch = TimeUnit.SECONDS.toMillis(transition.toEpochSecond()); - long twoHours = TimeUnit.HOURS.toMillis(2); - assertUnitRoundingSameAsJavaUtilTimeImplementation( - Rounding.DateTimeUnit.DAY_OF_MONTH, - tz, - millisSinceEpoch - twoHours, - millisSinceEpoch + twoHours - ); - } - - /** - * tests for dst transition with overlaps and day roundings. - */ - public void testDST_END_Edgecases() { - // First case, dst happens at 1am local time, switching back one hour. - // We want the overlapping hour to count for the next day, making it a 25h interval - - ZoneId tz = ZoneId.of("Atlantic/Azores"); - Rounding.DateTimeUnit timeUnit = Rounding.DateTimeUnit.DAY_OF_MONTH; - Rounding rounding = new Rounding.TimeUnitRounding(timeUnit, tz); - - // Sunday, 29 October 2000, 01:00:00 clocks were turned backward 1 hour - // to Sunday, 29 October 2000, 00:00:00 local standard time instead - // which means there were two midnights that day. - - long midnightBeforeTransition = time("2000-10-29T00:00:00", tz); - long midnightOfTransition = time("2000-10-29T00:00:00-01:00"); - assertEquals(60L * 60L * 1000L, midnightOfTransition - midnightBeforeTransition); - long nextMidnight = time("2000-10-30T00:00:00", tz); - - assertInterval(midnightBeforeTransition, nextMidnight, rounding, 25 * 60, tz); - - assertThat(rounding.round(time("2000-10-29T06:00:00-01:00")), isDate(time("2000-10-29T00:00:00Z"), tz)); - - // Second case, dst happens at 0am local time, switching back one hour to 23pm local time. 
- // We want the overlapping hour to count for the previous day here - - tz = ZoneId.of("America/Lima"); - rounding = new Rounding.TimeUnitRounding(timeUnit, tz); - - // Sunday, 1 April 1990, 00:00:00 clocks were turned backward 1 hour to - // Saturday, 31 March 1990, 23:00:00 local standard time instead - - midnightBeforeTransition = time("1990-03-31T00:00:00.000-04:00"); - nextMidnight = time("1990-04-01T00:00:00.000-05:00"); - assertInterval(midnightBeforeTransition, nextMidnight, rounding, 25 * 60, tz); - - // make sure the next interval is 24h long again - long midnightAfterTransition = time("1990-04-01T00:00:00.000-05:00"); - nextMidnight = time("1990-04-02T00:00:00.000-05:00"); - assertInterval(midnightAfterTransition, nextMidnight, rounding, 24 * 60, tz); - } - - public void testBeforeOverlapLarge() { - // Moncton has a perfectly normal hour long Daylight Savings time. - ZoneId tz = ZoneId.of("America/Moncton"); - Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).timeZone(tz).build(); - assertThat(rounding.round(time("2003-10-26T03:43:35.079Z")), isDate(time("2003-10-26T03:00:00Z"), tz)); - } - - public void testBeforeOverlapSmall() { - /* - * Lord Howe is fun because Daylight Savings time is only 30 minutes - * so we round HOUR_OF_DAY differently. - */ - ZoneId tz = ZoneId.of("Australia/Lord_Howe"); - Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).timeZone(tz).build(); - assertThat(rounding.round(time("2018-03-31T15:25:15.148Z")), isDate(time("2018-03-31T14:00:00Z"), tz)); - } - - public void testQuarterOfYear() { - /* - * If we're not careful with how we look up local time offsets we can - * end up not loading the offsets far enough back to round this time - * to QUARTER_OF_YEAR properly. 
- */ - ZoneId tz = ZoneId.of("Asia/Baghdad"); - Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.QUARTER_OF_YEAR).timeZone(tz).build(); - assertThat(rounding.round(time("2006-12-31T13:21:44.308Z")), isDate(time("2006-09-30T20:00:00Z"), tz)); - } - - public void testPrepareLongRangeRoundsToMidnight() { - ZoneId tz = ZoneId.of("America/New_York"); - long min = time("01980-01-01T00:00:00Z"); - long max = time("10000-01-01T00:00:00Z"); - Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.QUARTER_OF_YEAR).timeZone(tz).build(); - assertThat(rounding.round(time("2006-12-31T13:21:44.308Z")), isDate(time("2006-10-01T04:00:00Z"), tz)); - assertThat(rounding.round(time("9000-12-31T13:21:44.308Z")), isDate(time("9000-10-01T04:00:00Z"), tz)); - - Rounding.Prepared prepared = rounding.prepare(min, max); - assertThat(prepared.round(time("2006-12-31T13:21:44.308Z")), isDate(time("2006-10-01T04:00:00Z"), tz)); - assertThat(prepared.round(time("9000-12-31T13:21:44.308Z")), isDate(time("9000-10-01T04:00:00Z"), tz)); - } - - public void testPrepareLongRangeRoundsNotToMidnight() { - ZoneId tz = ZoneId.of("Australia/Lord_Howe"); - long min = time("01980-01-01T00:00:00Z"); - long max = time("10000-01-01T00:00:00Z"); - Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).timeZone(tz).build(); - assertThat(rounding.round(time("2018-03-31T15:25:15.148Z")), isDate(time("2018-03-31T14:00:00Z"), tz)); - assertThat(rounding.round(time("9000-03-31T15:25:15.148Z")), isDate(time("9000-03-31T15:00:00Z"), tz)); - - Rounding.Prepared prepared = rounding.prepare(min, max); - assertThat(prepared.round(time("2018-03-31T15:25:15.148Z")), isDate(time("2018-03-31T14:00:00Z"), tz)); - assertThat(prepared.round(time("9000-03-31T15:25:15.148Z")), isDate(time("9000-03-31T15:00:00Z"), tz)); - } - - /** - * Example of when we round past when local clocks were wound forward. 
- */ - public void testIntervalBeforeGap() { - ZoneId tz = ZoneId.of("Africa/Cairo"); - Rounding rounding = Rounding.builder(TimeValue.timeValueDays(257)).timeZone(tz).build(); - assertThat(rounding.round(time("1969-07-08T09:00:14.599Z")), isDate(time("1969-04-18T22:00:00Z"), tz)); - } - - /** - * Example of when we round past when local clocks were wound backwards, - * and then past the time they were wound forwards before - * that. So, we jumped back a long, long way. - */ - public void testIntervalTwoTransitions() { - ZoneId tz = ZoneId.of("America/Detroit"); - Rounding rounding = Rounding.builder(TimeValue.timeValueDays(279)).timeZone(tz).build(); - assertThat(rounding.round(time("1982-11-10T02:51:22.662Z")), isDate(time("1982-03-23T05:00:00Z"), tz)); - } - - public void testFixedIntervalRoundingSize() { - Rounding unitRounding = Rounding.builder(TimeValue.timeValueHours(10)).build(); - Rounding.Prepared prepared = unitRounding.prepare(time("2010-01-01T00:00:00.000Z"), time("2020-01-01T00:00:00.000Z")); - assertThat(prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.SECOND_OF_MINUTE), - closeTo(36000.0, 0.000001)); - assertThat(prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.MINUTES_OF_HOUR), - closeTo(600.0, 0.000001)); - assertThat(prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.HOUR_OF_DAY), - closeTo(10.0, 0.000001)); - assertThat(prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.DAY_OF_MONTH), - closeTo(10.0 / 24.0, 0.000001)); - assertThat(prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR), - closeTo(10.0 / 168.0, 0.000001)); - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.MONTH_OF_YEAR)); - assertThat(ex.getMessage(), equalTo("Cannot use month-based rate unit [month] with fixed 
interval based histogram, " + - "only week, day, hour, minute and second are supported for this histogram")); - ex = expectThrows(IllegalArgumentException.class, - () -> prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.QUARTER_OF_YEAR)); - assertThat(ex.getMessage(), equalTo("Cannot use month-based rate unit [quarter] with fixed interval based histogram, " + - "only week, day, hour, minute and second are supported for this histogram")); - ex = expectThrows(IllegalArgumentException.class, - () -> prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.YEAR_OF_CENTURY)); - assertThat(ex.getMessage(), equalTo("Cannot use month-based rate unit [year] with fixed interval based histogram, " + - "only week, day, hour, minute and second are supported for this histogram")); - } - - public void testMillisecondsBasedUnitCalendarRoundingSize() { - Rounding unitRounding = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).build(); - Rounding.Prepared prepared = unitRounding.prepare(time("2010-01-01T00:00:00.000Z"), time("2020-01-01T00:00:00.000Z")); - assertThat(prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.SECOND_OF_MINUTE), - closeTo(3600.0, 0.000001)); - assertThat(prepared.roundingSize(Rounding.DateTimeUnit.SECOND_OF_MINUTE), - closeTo(3600.0, 0.000001)); - assertThat(prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.MINUTES_OF_HOUR), - closeTo(60.0, 0.000001)); - assertThat(prepared.roundingSize(Rounding.DateTimeUnit.MINUTES_OF_HOUR), - closeTo(60.0, 0.000001)); - assertThat(prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.HOUR_OF_DAY), - closeTo(1.0, 0.000001)); - assertThat(prepared.roundingSize(Rounding.DateTimeUnit.HOUR_OF_DAY), - closeTo(1.0, 0.000001)); - assertThat(prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.DAY_OF_MONTH), - closeTo(1 / 24.0, 0.000001)); - 
assertThat(prepared.roundingSize(Rounding.DateTimeUnit.DAY_OF_MONTH), - closeTo(1 / 24.0, 0.000001)); - assertThat(prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR), - closeTo(1 / 168.0, 0.000001)); - assertThat(prepared.roundingSize(Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR), - closeTo(1 / 168.0, 0.000001)); - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.MONTH_OF_YEAR)); - assertThat(ex.getMessage(), equalTo("Cannot use month-based rate unit [month] with non-month based calendar interval " + - "histogram [hour] only week, day, hour, minute and second are supported for this histogram")); - ex = expectThrows(IllegalArgumentException.class, - () -> prepared.roundingSize(Rounding.DateTimeUnit.MONTH_OF_YEAR)); - assertThat(ex.getMessage(), equalTo("Cannot use month-based rate unit [month] with non-month based calendar interval " + - "histogram [hour] only week, day, hour, minute and second are supported for this histogram")); - ex = expectThrows(IllegalArgumentException.class, - () -> prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.QUARTER_OF_YEAR)); - assertThat(ex.getMessage(), equalTo("Cannot use month-based rate unit [quarter] with non-month based calendar interval " + - "histogram [hour] only week, day, hour, minute and second are supported for this histogram")); - ex = expectThrows(IllegalArgumentException.class, - () -> prepared.roundingSize(Rounding.DateTimeUnit.QUARTER_OF_YEAR)); - assertThat(ex.getMessage(), equalTo("Cannot use month-based rate unit [quarter] with non-month based calendar interval " + - "histogram [hour] only week, day, hour, minute and second are supported for this histogram")); - ex = expectThrows(IllegalArgumentException.class, - () -> prepared.roundingSize(time("2015-01-01T00:00:00.000Z"), Rounding.DateTimeUnit.YEAR_OF_CENTURY)); - 
assertThat(ex.getMessage(), equalTo("Cannot use month-based rate unit [year] with non-month based calendar interval " + - "histogram [hour] only week, day, hour, minute and second are supported for this histogram")); - ex = expectThrows(IllegalArgumentException.class, - () -> prepared.roundingSize(Rounding.DateTimeUnit.YEAR_OF_CENTURY)); - assertThat(ex.getMessage(), equalTo("Cannot use month-based rate unit [year] with non-month based calendar interval " + - "histogram [hour] only week, day, hour, minute and second are supported for this histogram")); - } - - public void testNonMillisecondsBasedUnitCalendarRoundingSize() { - Rounding unitRounding = Rounding.builder(Rounding.DateTimeUnit.QUARTER_OF_YEAR).build(); - Rounding.Prepared prepared = unitRounding.prepare(time("2010-01-01T00:00:00.000Z"), time("2020-01-01T00:00:00.000Z")); - long firstQuarter = prepared.round(time("2015-01-01T00:00:00.000Z")); - // Ratio based - assertThat(prepared.roundingSize(firstQuarter, Rounding.DateTimeUnit.MONTH_OF_YEAR), closeTo(3.0, 0.000001)); - assertThat(prepared.roundingSize(firstQuarter, Rounding.DateTimeUnit.QUARTER_OF_YEAR), closeTo(1.0, 0.000001)); - assertThat(prepared.roundingSize(firstQuarter, Rounding.DateTimeUnit.YEAR_OF_CENTURY), closeTo(0.25, 0.000001)); - assertThat(prepared.roundingSize(Rounding.DateTimeUnit.MONTH_OF_YEAR), closeTo(3.0, 0.000001)); - assertThat(prepared.roundingSize(Rounding.DateTimeUnit.QUARTER_OF_YEAR), closeTo(1.0, 0.000001)); - assertThat(prepared.roundingSize(Rounding.DateTimeUnit.YEAR_OF_CENTURY), closeTo(0.25, 0.000001)); - // Real interval based - assertThat(prepared.roundingSize(firstQuarter, Rounding.DateTimeUnit.SECOND_OF_MINUTE), closeTo(7776000.0, 0.000001)); - assertThat(prepared.roundingSize(firstQuarter, Rounding.DateTimeUnit.MINUTES_OF_HOUR), closeTo(129600.0, 0.000001)); - assertThat(prepared.roundingSize(firstQuarter, Rounding.DateTimeUnit.HOUR_OF_DAY), closeTo(2160.0, 0.000001)); - assertThat(prepared.roundingSize(firstQuarter, 
Rounding.DateTimeUnit.DAY_OF_MONTH), closeTo(90.0, 0.000001)); - long thirdQuarter = prepared.round(time("2015-07-01T00:00:00.000Z")); - assertThat(prepared.roundingSize(thirdQuarter, Rounding.DateTimeUnit.DAY_OF_MONTH), closeTo(92.0, 0.000001)); - assertThat(prepared.roundingSize(thirdQuarter, Rounding.DateTimeUnit.HOUR_OF_DAY), closeTo(2208.0, 0.000001)); - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> prepared.roundingSize(Rounding.DateTimeUnit.SECOND_OF_MINUTE)); - assertThat(ex.getMessage(), equalTo("Cannot use non month-based rate unit [second] with calendar interval histogram " + - "[quarter] only month, quarter and year are supported for this histogram")); - } - - public void testFixedRoundingPoints() { - Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.QUARTER_OF_YEAR).build(); - assertFixedRoundingPoints( - rounding.prepare(time("2020-01-01T00:00:00"), time("2021-01-01T00:00:00")), - "2020-01-01T00:00:00", - "2020-04-01T00:00:00", - "2020-07-01T00:00:00", - "2020-10-01T00:00:00", - "2021-01-01T00:00:00" - ); - rounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build(); - assertFixedRoundingPoints( - rounding.prepare(time("2020-01-01T00:00:00"), time("2020-01-06T00:00:00")), - "2020-01-01T00:00:00", - "2020-01-02T00:00:00", - "2020-01-03T00:00:00", - "2020-01-04T00:00:00", - "2020-01-05T00:00:00", - "2020-01-06T00:00:00" - ); - } - - private void assertFixedRoundingPoints(Rounding.Prepared prepared, String... 
expected) { - assertThat( - Arrays.stream(prepared.fixedRoundingPoints()).mapToObj(Instant::ofEpochMilli).collect(toList()), - equalTo(Arrays.stream(expected).map(RoundingTests::time).map(Instant::ofEpochMilli).collect(toList())) - ); - } - - private void assertInterval(long rounded, long nextRoundingValue, Rounding rounding, int minutes, - ZoneId tz) { - assertInterval(rounded, dateBetween(rounded, nextRoundingValue), nextRoundingValue, rounding, tz); - long millisPerMinute = 60_000; - assertEquals(millisPerMinute * minutes, nextRoundingValue - rounded); - } - - /** - * perform a number on assertions and checks on {@link org.elasticsearch.common.Rounding.TimeUnitRounding} intervals - * @param rounded the expected low end of the rounding interval - * @param unrounded a date in the interval to be checked for rounding - * @param nextRoundingValue the expected upper end of the rounding interval - * @param rounding the rounding instance - */ - private void assertInterval(long rounded, long unrounded, long nextRoundingValue, Rounding rounding, ZoneId tz) { - assertThat("rounding should be idempotent", rounding.round(rounded), isDate(rounded, tz)); - assertThat("rounded value smaller or equal than unrounded", rounded, lessThanOrEqualTo(unrounded)); - assertThat("values less than rounded should round further down", rounding.round(rounded - 1), lessThan(rounded)); - assertThat("nextRounding value should be a rounded date", rounding.round(nextRoundingValue), isDate(nextRoundingValue, tz)); - assertThat("values above nextRounding should round down there", rounding.round(nextRoundingValue + 1), - isDate(nextRoundingValue, tz)); - - if (isTimeWithWellDefinedRounding(tz, unrounded)) { - assertThat("nextRounding value should be greater than date" + rounding, nextRoundingValue, greaterThan(unrounded)); - - long dateBetween = dateBetween(rounded, nextRoundingValue); - long roundingDateBetween = rounding.round(dateBetween); - ZonedDateTime zonedDateBetween = 
ZonedDateTime.ofInstant(Instant.ofEpochMilli(dateBetween), tz); - assertThat("dateBetween [" + zonedDateBetween + "/" + dateBetween + "] should round down to roundedDate [" + - Instant.ofEpochMilli(roundingDateBetween) + "]", roundingDateBetween, isDate(rounded, tz)); - assertThat("dateBetween [" + zonedDateBetween + "] should round up to nextRoundingValue", - rounding.nextRoundingValue(dateBetween), isDate(nextRoundingValue, tz)); - } - } - - private static boolean isTimeWithWellDefinedRounding(ZoneId tz, long t) { - if (tz.getId().equals("America/St_Johns") - || tz.getId().equals("America/Goose_Bay") - || tz.getId().equals("America/Moncton") - || tz.getId().equals("Canada/Newfoundland")) { - - // Clocks went back at 00:01 between 1987 and 2010, causing overlapping days. - // These timezones are otherwise uninteresting, so just skip this period. - - return t <= time("1987-10-01T00:00:00Z") - || t >= time("2010-12-01T00:00:00Z"); - } - - if (tz.getId().equals("Antarctica/Casey")) { - - // Clocks went back 3 hours at 02:00 on 2010-03-05, causing overlapping days. - - return t <= time("2010-03-03T00:00:00Z") - || t >= time("2010-03-07T00:00:00Z"); - } - if (tz.getId().equals("Pacific/Guam") || tz.getId().equals("Pacific/Saipan")) { - // Clocks went back at 00:01 in 1969, causing overlapping days. 
- return t <= time("1969-01-25T00:00:00Z") - || t >= time("1969-01-26T00:00:00Z"); - } - - return true; - } - - private static long randomDate() { - return Math.abs(randomLong() % (2 * (long) 10e11)); // 1970-01-01T00:00:00Z - 2033-05-18T05:33:20.000+02:00 - } - - private static long[] randomDateBounds(Rounding.DateTimeUnit unit) { - long b1 = randomDate(); - if (randomBoolean()) { - // Sometimes use a fairly close date - return new long[] {b1, b1 + unit.extraLocalOffsetLookup() * between(1, 40)}; - } - // Otherwise use a totally random date - long b2 = randomValueOtherThan(b1, RoundingTests::randomDate); - if (b1 < b2) { - return new long[] {b1, b2}; - } - return new long[] {b2, b1}; - } - private static long dateBetween(long lower, long upper) { - long dateBetween = randomLongBetween(lower, upper - 1); - assert lower <= dateBetween && dateBetween < upper; - return dateBetween; - } - - private static long time(String time) { - return time(time, ZoneOffset.UTC); - } - - private static long time(String time, ZoneId zone) { - TemporalAccessor accessor = DateFormatter.forPattern("date_optional_time").withZone(zone).parse(time); - return DateFormatters.from(accessor).toInstant().toEpochMilli(); - } - - private static Matcher isDate(final long expected, ZoneId tz) { - return new TypeSafeMatcher() { - @Override - public boolean matchesSafely(final Long item) { - return expected == item; - } - - @Override - public void describeTo(Description description) { - ZonedDateTime zonedDateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(expected), tz); - description.appendText(DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(zonedDateTime) + " [" + expected + "] "); - } - - @Override - protected void describeMismatchSafely(final Long actual, final Description mismatchDescription) { - ZonedDateTime zonedDateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(actual), tz); - mismatchDescription.appendText(" was ") - 
.appendValue(DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(zonedDateTime) + " [" + actual + "]"); - } - }; - } - -} diff --git a/server/src/test/java/org/elasticsearch/common/RoundingWireTests.java b/server/src/test/java/org/elasticsearch/common/RoundingWireTests.java deleted file mode 100644 index 5c0808809e788..0000000000000 --- a/server/src/test/java/org/elasticsearch/common/RoundingWireTests.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.common; - -import org.elasticsearch.common.Rounding.DateTimeUnit; -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.test.AbstractWireSerializingTestCase; - -public class RoundingWireTests extends AbstractWireSerializingTestCase { - @Override - protected Rounding createTestInstance() { - Rounding.Builder builder; - if (randomBoolean()) { - builder = Rounding.builder(randomFrom(DateTimeUnit.values())); - } else { - // The time value's millisecond component must be > 0 so we're limited in the suffixes we can use. 
- final var tv = randomTimeValue(1, 1000, "d", "h", "ms", "s", "m"); - builder = Rounding.builder(TimeValue.parseTimeValue(tv, "test")); - } - if (randomBoolean()) { - builder.timeZone(randomZone()); - } - if (randomBoolean()) { - builder.offset(randomLong()); - } - return builder.build(); - } - - @Override - protected Reader instanceReader() { - return Rounding::read; - } -} diff --git a/server/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java b/server/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java index be4ba4d1b37cd..5eb1ab545ffb4 100644 --- a/server/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java +++ b/server/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java @@ -19,11 +19,10 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import java.io.EOFException; import java.io.IOException; +import java.time.Instant; import java.time.OffsetTime; import java.time.ZoneId; import java.time.ZonedDateTime; @@ -298,10 +297,10 @@ public void testSimpleStreams() throws Exception { out.writeOptionalBytesReference(new BytesArray("test")); out.writeOptionalDouble(null); out.writeOptionalDouble(1.2); - out.writeTimeZone(DateTimeZone.forID("CET")); - out.writeOptionalTimeZone(DateTimeZone.getDefault()); + out.writeZoneId(ZoneId.of("CET")); + out.writeOptionalZoneId(ZoneId.systemDefault()); out.writeOptionalTimeZone(null); - out.writeGenericValue(new DateTime(123456, DateTimeZone.forID("America/Los_Angeles"))); + out.writeGenericValue(ZonedDateTime.ofInstant(Instant.ofEpochMilli(123456), ZoneId.of("America/Los_Angeles"))); final OffsetTime offsetNow = OffsetTime.now(randomZone()); out.writeGenericValue(offsetNow); final byte[] bytes = BytesReference.toBytes(out.bytes()); @@ -333,8 +332,8 @@ public void testSimpleStreams() throws 
Exception { assertThat(in.readOptionalBytesReference(), equalTo(new BytesArray("test"))); assertNull(in.readOptionalDouble()); assertThat(in.readOptionalDouble(), closeTo(1.2, 0.0001)); - assertEquals(DateTimeZone.forID("CET"), in.readTimeZone()); - assertEquals(DateTimeZone.getDefault(), in.readOptionalTimeZone()); + assertEquals(ZoneId.of("CET"), in.readZoneId()); + assertEquals(ZoneId.systemDefault(), in.readOptionalZoneId()); assertNull(in.readOptionalTimeZone()); Object dt = in.readGenericValue(); assertThat(dt, instanceOf(ZonedDateTime.class)); diff --git a/server/src/test/java/org/elasticsearch/common/rounding/DateTimeUnit.java b/server/src/test/java/org/elasticsearch/common/rounding/DateTimeUnit.java deleted file mode 100644 index 7bd53f52979b7..0000000000000 --- a/server/src/test/java/org/elasticsearch/common/rounding/DateTimeUnit.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.common.rounding; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.joda.Joda; -import org.joda.time.DateTimeField; -import org.joda.time.DateTimeZone; -import org.joda.time.chrono.ISOChronology; - -import java.util.function.Function; - -public enum DateTimeUnit { - - WEEK_OF_WEEKYEAR( (byte) 1, tz -> ISOChronology.getInstance(tz).weekOfWeekyear()), - YEAR_OF_CENTURY( (byte) 2, tz -> ISOChronology.getInstance(tz).yearOfCentury()), - QUARTER( (byte) 3, tz -> Joda.QuarterOfYear.getField(ISOChronology.getInstance(tz))), - MONTH_OF_YEAR( (byte) 4, tz -> ISOChronology.getInstance(tz).monthOfYear()), - DAY_OF_MONTH( (byte) 5, tz -> ISOChronology.getInstance(tz).dayOfMonth()), - HOUR_OF_DAY( (byte) 6, tz -> ISOChronology.getInstance(tz).hourOfDay()), - MINUTES_OF_HOUR( (byte) 7, tz -> ISOChronology.getInstance(tz).minuteOfHour()), - SECOND_OF_MINUTE( (byte) 8, tz -> ISOChronology.getInstance(tz).secondOfMinute()); - - private final byte id; - private final Function fieldFunction; - - DateTimeUnit(byte id, Function fieldFunction) { - this.id = id; - this.fieldFunction = fieldFunction; - } - - public byte id() { - return id; - } - - /** - * @return the {@link DateTimeField} for the provided {@link DateTimeZone} for this time unit - */ - public DateTimeField field(DateTimeZone tz) { - return fieldFunction.apply(tz); - } - - public static DateTimeUnit resolve(byte id) { - switch (id) { - case 1: return WEEK_OF_WEEKYEAR; - case 2: return YEAR_OF_CENTURY; - case 3: return QUARTER; - case 4: return MONTH_OF_YEAR; - case 5: return DAY_OF_MONTH; - case 6: return HOUR_OF_DAY; - case 7: return MINUTES_OF_HOUR; - case 8: return SECOND_OF_MINUTE; - default: throw new ElasticsearchException("Unknown date time unit id [" + id + "]"); - } - } -} diff --git a/server/src/test/java/org/elasticsearch/common/rounding/DateTimeUnitTests.java b/server/src/test/java/org/elasticsearch/common/rounding/DateTimeUnitTests.java deleted 
file mode 100644 index 0aaf9fa5f1f8b..0000000000000 --- a/server/src/test/java/org/elasticsearch/common/rounding/DateTimeUnitTests.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.common.rounding; - -import org.elasticsearch.test.ESTestCase; - -import static org.elasticsearch.common.rounding.DateTimeUnit.DAY_OF_MONTH; -import static org.elasticsearch.common.rounding.DateTimeUnit.HOUR_OF_DAY; -import static org.elasticsearch.common.rounding.DateTimeUnit.MINUTES_OF_HOUR; -import static org.elasticsearch.common.rounding.DateTimeUnit.MONTH_OF_YEAR; -import static org.elasticsearch.common.rounding.DateTimeUnit.QUARTER; -import static org.elasticsearch.common.rounding.DateTimeUnit.SECOND_OF_MINUTE; -import static org.elasticsearch.common.rounding.DateTimeUnit.WEEK_OF_WEEKYEAR; -import static org.elasticsearch.common.rounding.DateTimeUnit.YEAR_OF_CENTURY; - -public class DateTimeUnitTests extends ESTestCase { - - /** - * test that we don't accidentally change enum ids - */ - public void testEnumIds() { - assertEquals(1, WEEK_OF_WEEKYEAR.id()); - assertEquals(WEEK_OF_WEEKYEAR, DateTimeUnit.resolve((byte) 1)); - - assertEquals(2, YEAR_OF_CENTURY.id()); - assertEquals(YEAR_OF_CENTURY, DateTimeUnit.resolve((byte) 2)); - - assertEquals(3, QUARTER.id()); - assertEquals(QUARTER, DateTimeUnit.resolve((byte) 3)); - - assertEquals(4, MONTH_OF_YEAR.id()); - assertEquals(MONTH_OF_YEAR, DateTimeUnit.resolve((byte) 4)); - - assertEquals(5, DAY_OF_MONTH.id()); - assertEquals(DAY_OF_MONTH, DateTimeUnit.resolve((byte) 5)); - - assertEquals(6, HOUR_OF_DAY.id()); - assertEquals(HOUR_OF_DAY, DateTimeUnit.resolve((byte) 6)); 
- - assertEquals(7, MINUTES_OF_HOUR.id()); - assertEquals(MINUTES_OF_HOUR, DateTimeUnit.resolve((byte) 7)); - - assertEquals(8, SECOND_OF_MINUTE.id()); - assertEquals(SECOND_OF_MINUTE, DateTimeUnit.resolve((byte) 8)); - } -} diff --git a/server/src/test/java/org/elasticsearch/common/rounding/Rounding.java b/server/src/test/java/org/elasticsearch/common/rounding/Rounding.java deleted file mode 100644 index ec9d52e4172db..0000000000000 --- a/server/src/test/java/org/elasticsearch/common/rounding/Rounding.java +++ /dev/null @@ -1,416 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.common.rounding; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; -import org.joda.time.DateTimeField; -import org.joda.time.DateTimeZone; -import org.joda.time.IllegalInstantException; - -import java.io.IOException; -import java.util.Objects; - -/** - * A strategy for rounding long values. - * - * Use the java based Rounding class where applicable - */ -@Deprecated -public abstract class Rounding implements Writeable { - - public abstract byte id(); - - /** - * Rounds the given value. - */ - public abstract long round(long value); - - /** - * Given the rounded value (which was potentially generated by {@link #round(long)}, returns the next rounding value. For example, with - * interval based rounding, if the interval is 3, {@code nextRoundValue(6) = 9 }. 
- * - * @param value The current rounding value - * @return The next rounding value; - */ - public abstract long nextRoundingValue(long value); - - @Override - public abstract boolean equals(Object obj); - - @Override - public abstract int hashCode(); - - public static Builder builder(DateTimeUnit unit) { - return new Builder(unit); - } - - public static Builder builder(TimeValue interval) { - return new Builder(interval); - } - - public static class Builder { - - private final DateTimeUnit unit; - private final long interval; - - private DateTimeZone timeZone = DateTimeZone.UTC; - - public Builder(DateTimeUnit unit) { - this.unit = unit; - this.interval = -1; - } - - public Builder(TimeValue interval) { - this.unit = null; - if (interval.millis() < 1) - throw new IllegalArgumentException("Zero or negative time interval not supported"); - this.interval = interval.millis(); - } - - public Builder timeZone(DateTimeZone timeZone) { - if (timeZone == null) { - throw new IllegalArgumentException("Setting null as timezone is not supported"); - } - this.timeZone = timeZone; - return this; - } - - public Rounding build() { - Rounding timeZoneRounding; - if (unit != null) { - timeZoneRounding = new TimeUnitRounding(unit, timeZone); - } else { - timeZoneRounding = new TimeIntervalRounding(interval, timeZone); - } - return timeZoneRounding; - } - } - - static class TimeUnitRounding extends Rounding { - - static final byte ID = 1; - - private final DateTimeUnit unit; - private final DateTimeField field; - private final DateTimeZone timeZone; - private final boolean unitRoundsToMidnight; - - TimeUnitRounding(DateTimeUnit unit, DateTimeZone timeZone) { - this.unit = unit; - this.field = unit.field(timeZone); - unitRoundsToMidnight = this.field.getDurationField().getUnitMillis() > 60L * 60L * 1000L; - this.timeZone = timeZone; - } - - TimeUnitRounding(StreamInput in) throws IOException { - unit = DateTimeUnit.resolve(in.readByte()); - timeZone = 
DateTimeZone.forID(in.readString()); - field = unit.field(timeZone); - unitRoundsToMidnight = field.getDurationField().getUnitMillis() > 60L * 60L * 1000L; - } - - @Override - public byte id() { - return ID; - } - - /** - * @return The latest timestamp T which is strictly before utcMillis - * and such that timeZone.getOffset(T) != timeZone.getOffset(utcMillis). - * If there is no such T, returns Long.MAX_VALUE. - */ - private long previousTransition(long utcMillis) { - final int offsetAtInputTime = timeZone.getOffset(utcMillis); - do { - // Some timezones have transitions that do not change the offset, so we have to - // repeatedly call previousTransition until a nontrivial transition is found. - - long previousTransition = timeZone.previousTransition(utcMillis); - if (previousTransition == utcMillis) { - // There are no earlier transitions - return Long.MAX_VALUE; - } - assert previousTransition < utcMillis; // Progress was made - utcMillis = previousTransition; - } while (timeZone.getOffset(utcMillis) == offsetAtInputTime); - - return utcMillis; - } - - @Override - public long round(long utcMillis) { - - // field.roundFloor() works as long as the offset doesn't change. It is worth getting this case out of the way first, as - // the calculations for fixing things near to offset changes are a little expensive and are unnecessary in the common case - // of working in UTC. - if (timeZone.isFixed()) { - return field.roundFloor(utcMillis); - } - - // When rounding to hours we consider any local time of the form 'xx:00:00' as rounded, even though this gives duplicate - // bucket names for the times when the clocks go back. Shorter units behave similarly. However, longer units round down to - // midnight, and on the days where there are two midnights we would rather pick the earlier one, so that buckets are - // uniquely identified by the date. 
- if (unitRoundsToMidnight) { - final long anyLocalStartOfDay = field.roundFloor(utcMillis); - // `anyLocalStartOfDay` is _supposed_ to be the Unix timestamp for the start of the day in question in the current time - // zone. Mostly this just means "midnight", which is fine, and on days with no local midnight it's the first time that - // does occur on that day which is also ok. However, on days with >1 local midnight this is _one_ of the midnights, but - // may not be the first. Check whether this is happening, and fix it if so. - - final long previousTransition = previousTransition(anyLocalStartOfDay); - - if (previousTransition == Long.MAX_VALUE) { - // No previous transitions, so there can't be another earlier local midnight. - return anyLocalStartOfDay; - } - - final long currentOffset = timeZone.getOffset(anyLocalStartOfDay); - final long previousOffset = timeZone.getOffset(previousTransition); - assert currentOffset != previousOffset; - - // NB we only assume interference from one previous transition. It's theoretically possible to have two transitions in - // quick succession, both of which have a midnight in them, but this doesn't appear to happen in the TZDB so (a) it's - // pointless to implement and (b) it won't be tested. I recognise that this comment is tempting fate and will likely - // cause this very situation to occur in the near future, and eagerly look forward to fixing this using a loop over - // previous transitions when it happens. - - final long alsoLocalStartOfDay = anyLocalStartOfDay + currentOffset - previousOffset; - // `alsoLocalStartOfDay` is the Unix timestamp for the start of the day in question if the previous offset were in - // effect. - - if (alsoLocalStartOfDay <= previousTransition) { - // Therefore the previous offset _is_ in effect at `alsoLocalStartOfDay`, and it's earlier than anyLocalStartOfDay, - // so this is the answer to use. 
- return alsoLocalStartOfDay; - } - else { - // The previous offset is not in effect at `alsoLocalStartOfDay`, so the current offset must be. - return anyLocalStartOfDay; - } - - } else { - do { - long rounded = field.roundFloor(utcMillis); - - // field.roundFloor() mostly works as long as the offset hasn't changed in [rounded, utcMillis], so look at where - // the offset most recently changed. - - final long previousTransition = previousTransition(utcMillis); - - if (previousTransition == Long.MAX_VALUE || previousTransition < rounded) { - // The offset did not change in [rounded, utcMillis], so roundFloor() worked as expected. - return rounded; - } - - // The offset _did_ change in [rounded, utcMillis]. Put differently, this means that none of the times in - // [previousTransition+1, utcMillis] were rounded, so the rounded time must be <= previousTransition. This means - // it's sufficient to try and round previousTransition down. - assert previousTransition < utcMillis; - utcMillis = previousTransition; - } while (true); - } - } - - @Override - public long nextRoundingValue(long utcMillis) { - long floor = round(utcMillis); - // add one unit and round to get to next rounded value - long next = round(field.add(floor, 1)); - if (next == floor) { - // in rare case we need to add more than one unit - next = round(field.add(floor, 2)); - } - return next; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeByte(unit.id()); - out.writeString(timeZone.getID()); - } - - @Override - public int hashCode() { - return Objects.hash(unit, timeZone); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - TimeUnitRounding other = (TimeUnitRounding) obj; - return Objects.equals(unit, other.unit) && Objects.equals(timeZone, other.timeZone); - } - - @Override - public String toString() { - return "[" + timeZone + "][" + unit + "]"; - } - } - - 
static class TimeIntervalRounding extends Rounding { - - static final byte ID = 2; - - private final long interval; - private final DateTimeZone timeZone; - - TimeIntervalRounding(long interval, DateTimeZone timeZone) { - if (interval < 1) - throw new IllegalArgumentException("Zero or negative time interval not supported"); - this.interval = interval; - this.timeZone = timeZone; - } - - TimeIntervalRounding(StreamInput in) throws IOException { - interval = in.readVLong(); - timeZone = DateTimeZone.forID(in.readString()); - } - - @Override - public byte id() { - return ID; - } - - @Override - public long round(long utcMillis) { - long timeLocal = timeZone.convertUTCToLocal(utcMillis); - long rounded = roundKey(timeLocal, interval) * interval; - long roundedUTC; - if (isInDSTGap(rounded) == false) { - roundedUTC = timeZone.convertLocalToUTC(rounded, true, utcMillis); - // check if we crossed DST transition, in this case we want the - // last rounded value before the transition - long transition = timeZone.previousTransition(utcMillis); - if (transition != utcMillis && transition > roundedUTC) { - roundedUTC = round(transition - 1); - } - } else { - /* - * Edge case where the rounded local time is illegal and landed - * in a DST gap. In this case, we choose 1ms tick after the - * transition date. We don't want the transition date itself - * because those dates, when rounded themselves, fall into the - * previous interval. This would violate the invariant that the - * rounding operation should be idempotent. - */ - roundedUTC = timeZone.previousTransition(utcMillis) + 1; - } - return roundedUTC; - } - - private static long roundKey(long value, long interval) { - if (value < 0) { - return (value - interval + 1) / interval; - } else { - return value / interval; - } - } - - /** - * Determine whether the local instant is a valid instant in the given - * time zone. 
The logic for this is taken from - * {@link DateTimeZone#convertLocalToUTC(long, boolean)} for the - * `strict` mode case, but instead of throwing an - * {@link IllegalInstantException}, which is costly, we want to return a - * flag indicating that the value is illegal in that time zone. - */ - private boolean isInDSTGap(long instantLocal) { - if (timeZone.isFixed()) { - return false; - } - // get the offset at instantLocal (first estimate) - int offsetLocal = timeZone.getOffset(instantLocal); - // adjust instantLocal using the estimate and recalc the offset - int offset = timeZone.getOffset(instantLocal - offsetLocal); - // if the offsets differ, we must be near a DST boundary - if (offsetLocal != offset) { - // determine if we are in the DST gap - long nextLocal = timeZone.nextTransition(instantLocal - offsetLocal); - if (nextLocal == (instantLocal - offsetLocal)) { - nextLocal = Long.MAX_VALUE; - } - long nextAdjusted = timeZone.nextTransition(instantLocal - offset); - if (nextAdjusted == (instantLocal - offset)) { - nextAdjusted = Long.MAX_VALUE; - } - if (nextLocal != nextAdjusted) { - // we are in the DST gap - return true; - } - } - return false; - } - - @Override - public long nextRoundingValue(long time) { - long timeLocal = time; - timeLocal = timeZone.convertUTCToLocal(time); - long next = timeLocal + interval; - return timeZone.convertLocalToUTC(next, false); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVLong(interval); - out.writeString(timeZone.getID()); - } - - @Override - public int hashCode() { - return Objects.hash(interval, timeZone); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - TimeIntervalRounding other = (TimeIntervalRounding) obj; - return Objects.equals(interval, other.interval) && Objects.equals(timeZone, other.timeZone); - } - } - - public static class Streams { - - public static void 
write(Rounding rounding, StreamOutput out) throws IOException { - out.writeByte(rounding.id()); - rounding.writeTo(out); - } - - public static Rounding read(StreamInput in) throws IOException { - Rounding rounding; - byte id = in.readByte(); - switch (id) { - case TimeUnitRounding.ID: - rounding = new TimeUnitRounding(in); - break; - case TimeIntervalRounding.ID: - rounding = new TimeIntervalRounding(in); - break; - default: - throw new ElasticsearchException("unknown rounding id [" + id + "]"); - } - return rounding; - } - - } - -} diff --git a/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java b/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java deleted file mode 100644 index df0b0b7ea1c84..0000000000000 --- a/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.common.rounding; - -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTimeZone; - -import java.time.ZoneOffset; - -import static org.hamcrest.Matchers.is; - -public class RoundingDuelTests extends ESTestCase { - - // dont include nano/micro seconds as rounding would become zero then and throw an exception - private static final String[] ALLOWED_TIME_SUFFIXES = new String[]{"d", "h", "ms", "s", "m"}; - - public void testDuellingImplementations() { - org.elasticsearch.common.Rounding.DateTimeUnit randomDateTimeUnit = - randomFrom(org.elasticsearch.common.Rounding.DateTimeUnit.values()); - org.elasticsearch.common.Rounding.Prepared rounding; - Rounding roundingJoda; - - if (randomBoolean()) { - rounding = org.elasticsearch.common.Rounding.builder(randomDateTimeUnit).timeZone(ZoneOffset.UTC).build().prepareForUnknown(); - DateTimeUnit dateTimeUnit = DateTimeUnit.resolve(randomDateTimeUnit.getId()); - roundingJoda = Rounding.builder(dateTimeUnit).timeZone(DateTimeZone.UTC).build(); - } else { - TimeValue interval = timeValue(); - rounding = org.elasticsearch.common.Rounding.builder(interval).timeZone(ZoneOffset.UTC).build().prepareForUnknown(); - roundingJoda = Rounding.builder(interval).timeZone(DateTimeZone.UTC).build(); - } - - long roundValue = randomLong(); - assertThat(roundingJoda.round(roundValue), is(rounding.round(roundValue))); - } - - static TimeValue timeValue() { - return TimeValue.parseTimeValue(randomIntBetween(1, 1000) + randomFrom(ALLOWED_TIME_SUFFIXES), "settingName"); - } -} diff --git a/server/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java b/server/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java deleted file mode 100644 index 613cd2a26412e..0000000000000 --- a/server/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java +++ /dev/null @@ -1,780 +0,0 @@ -/* - * Copyright Elasticsearch 
B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.common.rounding; - -import org.elasticsearch.core.Tuple; -import org.elasticsearch.common.rounding.Rounding.TimeIntervalRounding; -import org.elasticsearch.common.rounding.Rounding.TimeUnitRounding; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Description; -import org.hamcrest.Matcher; -import org.hamcrest.TypeSafeMatcher; -import org.joda.time.DateTime; -import org.joda.time.DateTimeConstants; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; - -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.TimeUnit; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.lessThan; -import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.hamcrest.Matchers.startsWith; - -public class TimeZoneRoundingTests extends ESTestCase { - - public void testUTCTimeUnitRounding() { - Rounding tzRounding = Rounding.builder(DateTimeUnit.MONTH_OF_YEAR).build(); - DateTimeZone tz = DateTimeZone.UTC; - assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-01T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-01T00:00:00.000Z")), isDate(time("2009-03-01T00:00:00.000Z"), tz)); - - tzRounding = Rounding.builder(DateTimeUnit.WEEK_OF_WEEKYEAR).build(); - assertThat(tzRounding.round(time("2012-01-10T01:01:01")), 
isDate(time("2012-01-09T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2012-01-16T00:00:00.000Z"), tz)); - - tzRounding = Rounding.builder(DateTimeUnit.QUARTER).build(); - assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-01T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2012-04-01T00:00:00.000Z"), tz)); - - tzRounding = Rounding.builder(DateTimeUnit.HOUR_OF_DAY).build(); - assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-10T01:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2012-01-09T01:00:00.000Z"), tz)); - - tzRounding = Rounding.builder(DateTimeUnit.DAY_OF_MONTH).build(); - assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-10T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2012-01-10T00:00:00.000Z"), tz)); - - tzRounding = Rounding.builder(DateTimeUnit.YEAR_OF_CENTURY).build(); - assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-01T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2013-01-01T00:00:00.000Z"), tz)); - - tzRounding = Rounding.builder(DateTimeUnit.MINUTES_OF_HOUR).build(); - assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-10T01:01:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2012-01-09T00:01:00.000Z"), tz)); - - tzRounding = Rounding.builder(DateTimeUnit.SECOND_OF_MINUTE).build(); - assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-10T01:01:01.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2012-01-09T00:00:01.000Z"), tz)); - } - - public 
void testUTCIntervalRounding() { - Rounding tzRounding = Rounding.builder(TimeValue.timeValueHours(12)).build(); - DateTimeZone tz = DateTimeZone.UTC; - assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-03T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-03T00:00:00.000Z")), isDate(time("2009-02-03T12:00:00.000Z"), tz)); - assertThat(tzRounding.round(time("2009-02-03T13:01:01")), isDate(time("2009-02-03T12:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-03T12:00:00.000Z")), isDate(time("2009-02-04T00:00:00.000Z"), tz)); - - tzRounding = Rounding.builder(TimeValue.timeValueHours(48)).build(); - assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-03T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-03T00:00:00.000Z")), isDate(time("2009-02-05T00:00:00.000Z"), tz)); - assertThat(tzRounding.round(time("2009-02-05T13:01:01")), isDate(time("2009-02-05T00:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-05T00:00:00.000Z")), isDate(time("2009-02-07T00:00:00.000Z"), tz)); - } - - /** - * test TimeIntervalRounding, (interval < 12h) with time zone shift - */ - public void testTimeIntervalRounding() { - DateTimeZone tz = DateTimeZone.forOffsetHours(-1); - Rounding tzRounding = Rounding.builder(TimeValue.timeValueHours(6)).timeZone(tz).build(); - assertThat(tzRounding.round(time("2009-02-03T00:01:01")), isDate(time("2009-02-02T19:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-02T19:00:00.000Z")), isDate(time("2009-02-03T01:00:00.000Z"), tz)); - - assertThat(tzRounding.round(time("2009-02-03T13:01:01")), isDate(time("2009-02-03T13:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-03T13:00:00.000Z")), isDate(time("2009-02-03T19:00:00.000Z"), tz)); - } - - /** - * test DayIntervalRounding, (interval >= 12h) with time zone shift - */ - public void 
testDayIntervalRounding() { - DateTimeZone tz = DateTimeZone.forOffsetHours(-8); - Rounding tzRounding = Rounding.builder(TimeValue.timeValueHours(12)).timeZone(tz).build(); - assertThat(tzRounding.round(time("2009-02-03T00:01:01")), isDate(time("2009-02-02T20:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-02T20:00:00.000Z")), isDate(time("2009-02-03T08:00:00.000Z"), tz)); - - assertThat(tzRounding.round(time("2009-02-03T13:01:01")), isDate(time("2009-02-03T08:00:00.000Z"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-03T08:00:00.000Z")), isDate(time("2009-02-03T20:00:00.000Z"), tz)); - } - - public void testDayRounding() { - int timezoneOffset = -2; - Rounding tzRounding = Rounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(DateTimeZone.forOffsetHours(timezoneOffset)) - .build(); - assertThat(tzRounding.round(0), equalTo(0L - TimeValue.timeValueHours(24 + timezoneOffset).millis())); - assertThat(tzRounding.nextRoundingValue(0L - TimeValue.timeValueHours(24 + timezoneOffset).millis()), equalTo(TimeValue - .timeValueHours(-timezoneOffset).millis())); - - DateTimeZone tz = DateTimeZone.forID("-08:00"); - tzRounding = Rounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(tz).build(); - assertThat(tzRounding.round(time("2012-04-01T04:15:30Z")), isDate(time("2012-03-31T08:00:00Z"), tz)); - - tzRounding = Rounding.builder(DateTimeUnit.MONTH_OF_YEAR).timeZone(tz).build(); - assertThat(tzRounding.round(time("2012-04-01T04:15:30Z")), equalTo(time("2012-03-01T08:00:00Z"))); - - // date in Feb-3rd, but still in Feb-2nd in -02:00 timezone - tz = DateTimeZone.forID("-02:00"); - tzRounding = Rounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(tz).build(); - assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-02T02:00:00"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-02T02:00:00")), isDate(time("2009-02-03T02:00:00"), tz)); - - // date in Feb-3rd, also in -02:00 timezone - tzRounding = 
Rounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(tz).build(); - assertThat(tzRounding.round(time("2009-02-03T02:01:01")), isDate(time("2009-02-03T02:00:00"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-03T02:00:00")), isDate(time("2009-02-04T02:00:00"), tz)); - } - - public void testTimeRounding() { - // hour unit - DateTimeZone tz = DateTimeZone.forOffsetHours(-2); - Rounding tzRounding = Rounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(tz).build(); - assertThat(tzRounding.round(0), equalTo(0L)); - assertThat(tzRounding.nextRoundingValue(0L), equalTo(TimeValue.timeValueHours(1L).getMillis())); - - assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-03T01:00:00"), tz)); - assertThat(tzRounding.nextRoundingValue(time("2009-02-03T01:00:00")), isDate(time("2009-02-03T02:00:00"), tz)); - } - - public void testTimeUnitRoundingDST() { - Rounding tzRounding; - // testing savings to non savings switch - DateTimeZone cet = DateTimeZone.forID("CET"); - tzRounding = Rounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(cet).build(); - assertThat(tzRounding.round(time("2014-10-26T01:01:01", cet)), isDate(time("2014-10-26T01:00:00+02:00"), cet)); - assertThat(tzRounding.nextRoundingValue(time("2014-10-26T01:00:00", cet)),isDate(time("2014-10-26T02:00:00+02:00"), cet)); - assertThat(tzRounding.nextRoundingValue(time("2014-10-26T02:00:00", cet)), isDate(time("2014-10-26T02:00:00+01:00"), cet)); - - // testing non savings to savings switch - tzRounding = Rounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(cet).build(); - assertThat(tzRounding.round(time("2014-03-30T01:01:01", cet)), isDate(time("2014-03-30T01:00:00+01:00"), cet)); - assertThat(tzRounding.nextRoundingValue(time("2014-03-30T01:00:00", cet)), isDate(time("2014-03-30T03:00:00", cet), cet)); - assertThat(tzRounding.nextRoundingValue(time("2014-03-30T03:00:00", cet)), isDate(time("2014-03-30T04:00:00", cet), cet)); - - // testing non savings to savings switch 
(America/Chicago) - DateTimeZone chg = DateTimeZone.forID("America/Chicago"); - Rounding tzRounding_utc = Rounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.UTC).build(); - assertThat(tzRounding.round(time("2014-03-09T03:01:01", chg)), isDate(time("2014-03-09T03:00:00", chg), chg)); - - Rounding tzRounding_chg = Rounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(chg).build(); - assertThat(tzRounding_chg.round(time("2014-03-09T03:01:01", chg)), isDate(time("2014-03-09T03:00:00", chg), chg)); - - // testing savings to non savings switch 2013 (America/Chicago) - assertThat(tzRounding_utc.round(time("2013-11-03T06:01:01", chg)), isDate(time("2013-11-03T06:00:00", chg), chg)); - assertThat(tzRounding_chg.round(time("2013-11-03T06:01:01", chg)), isDate(time("2013-11-03T06:00:00", chg), chg)); - - // testing savings to non savings switch 2014 (America/Chicago) - assertThat(tzRounding_utc.round(time("2014-11-02T06:01:01", chg)), isDate(time("2014-11-02T06:00:00", chg), chg)); - assertThat(tzRounding_chg.round(time("2014-11-02T06:01:01", chg)), isDate(time("2014-11-02T06:00:00", chg), chg)); - } - - /** - * Randomized test on TimeUnitRounding. Test uses random - * {@link DateTimeUnit} and {@link DateTimeZone} and often (50% of the time) - * chooses test dates that are exactly on or close to offset changes (e.g. - * DST) in the chosen time zone. - * - * It rounds the test date down and up and performs various checks on the - * rounding unit interval that is defined by this. 
Assumptions tested are - * described in - * {@link #assertInterval(long, long, long, Rounding, DateTimeZone)} - */ - public void testRoundingRandom() { - for (int i = 0; i < 1000; ++i) { - DateTimeUnit timeUnit = randomTimeUnit(); - DateTimeZone tz = randomDateTimeZone(); - Rounding rounding = new Rounding.TimeUnitRounding(timeUnit, tz); - long date = Math.abs(randomLong() % (2 * (long) 10e11)); // 1970-01-01T00:00:00Z - 2033-05-18T05:33:20.000+02:00 - long unitMillis = timeUnit.field(tz).getDurationField().getUnitMillis(); - if (randomBoolean()) { - nastyDate(date, tz, unitMillis); - } - final long roundedDate = rounding.round(date); - final long nextRoundingValue = rounding.nextRoundingValue(roundedDate); - - assertInterval(roundedDate, date, nextRoundingValue, rounding, tz); - - // check correct unit interval width for units smaller than a day, they should be fixed size except for transitions - if (unitMillis <= DateTimeConstants.MILLIS_PER_DAY) { - // if the interval defined didn't cross timezone offset transition, it should cover unitMillis width - if (tz.getOffset(roundedDate - 1) == tz.getOffset(nextRoundingValue + 1)) { - assertThat("unit interval width not as expected for [" + timeUnit + "], [" + tz + "] at " - + new DateTime(roundedDate), nextRoundingValue - roundedDate, equalTo(unitMillis)); - } - } - } - } - - /** - * To be even more nasty, go to a transition in the selected time zone. 
- * In one third of the cases stay there, otherwise go half a unit back or forth - */ - private static long nastyDate(long initialDate, DateTimeZone timezone, long unitMillis) { - long date = timezone.nextTransition(initialDate); - if (randomBoolean()) { - return date + (randomLong() % unitMillis); // positive and negative offset possible - } else { - return date; - } - } - - /** - * test DST end with interval rounding - * CET: 25 October 2015, 03:00:00 clocks were turned backward 1 hour to 25 October 2015, 02:00:00 local standard time - */ - public void testTimeIntervalCET_DST_End() { - long interval = TimeUnit.MINUTES.toMillis(20); - DateTimeZone tz = DateTimeZone.forID("CET"); - Rounding rounding = new TimeIntervalRounding(interval, tz); - - assertThat(rounding.round(time("2015-10-25T01:55:00+02:00")), isDate(time("2015-10-25T01:40:00+02:00"), tz)); - assertThat(rounding.round(time("2015-10-25T02:15:00+02:00")), isDate(time("2015-10-25T02:00:00+02:00"), tz)); - assertThat(rounding.round(time("2015-10-25T02:35:00+02:00")), isDate(time("2015-10-25T02:20:00+02:00"), tz)); - assertThat(rounding.round(time("2015-10-25T02:55:00+02:00")), isDate(time("2015-10-25T02:40:00+02:00"), tz)); - // after DST shift - assertThat(rounding.round(time("2015-10-25T02:15:00+01:00")), isDate(time("2015-10-25T02:00:00+01:00"), tz)); - assertThat(rounding.round(time("2015-10-25T02:35:00+01:00")), isDate(time("2015-10-25T02:20:00+01:00"), tz)); - assertThat(rounding.round(time("2015-10-25T02:55:00+01:00")), isDate(time("2015-10-25T02:40:00+01:00"), tz)); - assertThat(rounding.round(time("2015-10-25T03:15:00+01:00")), isDate(time("2015-10-25T03:00:00+01:00"), tz)); - } - - /** - * test DST start with interval rounding - * CET: 27 March 2016, 02:00:00 clocks were turned forward 1 hour to 27 March 2016, 03:00:00 local daylight time - */ - public void testTimeIntervalCET_DST_Start() { - long interval = TimeUnit.MINUTES.toMillis(20); - DateTimeZone tz = DateTimeZone.forID("CET"); - Rounding 
rounding = new TimeIntervalRounding(interval, tz); - // test DST start - assertThat(rounding.round(time("2016-03-27T01:55:00+01:00")), isDate(time("2016-03-27T01:40:00+01:00"), tz)); - assertThat(rounding.round(time("2016-03-27T02:00:00+01:00")), isDate(time("2016-03-27T03:00:00+02:00"), tz)); - assertThat(rounding.round(time("2016-03-27T03:15:00+02:00")), isDate(time("2016-03-27T03:00:00+02:00"), tz)); - assertThat(rounding.round(time("2016-03-27T03:35:00+02:00")), isDate(time("2016-03-27T03:20:00+02:00"), tz)); - } - - /** - * test DST start with offset not fitting interval, e.g. Asia/Kathmandu - * adding 15min on 1986-01-01T00:00:00 the interval from - * 1986-01-01T00:15:00+05:45 to 1986-01-01T00:20:00+05:45 to only be 5min - * long - */ - public void testTimeInterval_Kathmandu_DST_Start() { - long interval = TimeUnit.MINUTES.toMillis(20); - DateTimeZone tz = DateTimeZone.forID("Asia/Kathmandu"); - Rounding rounding = new TimeIntervalRounding(interval, tz); - assertThat(rounding.round(time("1985-12-31T23:55:00+05:30")), isDate(time("1985-12-31T23:40:00+05:30"), tz)); - assertThat(rounding.round(time("1986-01-01T00:16:00+05:45")), isDate(time("1986-01-01T00:15:00+05:45"), tz)); - assertThat(time("1986-01-01T00:15:00+05:45") - time("1985-12-31T23:40:00+05:30"), equalTo(TimeUnit.MINUTES.toMillis(20))); - assertThat(rounding.round(time("1986-01-01T00:26:00+05:45")), isDate(time("1986-01-01T00:20:00+05:45"), tz)); - assertThat(time("1986-01-01T00:20:00+05:45") - time("1986-01-01T00:15:00+05:45"), equalTo(TimeUnit.MINUTES.toMillis(5))); - assertThat(rounding.round(time("1986-01-01T00:46:00+05:45")), isDate(time("1986-01-01T00:40:00+05:45"), tz)); - assertThat(time("1986-01-01T00:40:00+05:45") - time("1986-01-01T00:20:00+05:45"), equalTo(TimeUnit.MINUTES.toMillis(20))); - } - - /** - * Special test for intervals that don't fit evenly into rounding interval. 
- * In this case, when interval crosses DST transition point, rounding in local - * time can land in a DST gap which results in wrong UTC rounding values. - */ - public void testIntervalRounding_NotDivisibleInteval() { - DateTimeZone tz = DateTimeZone.forID("CET"); - long interval = TimeUnit.MINUTES.toMillis(14); - Rounding rounding = new Rounding.TimeIntervalRounding(interval, tz); - - assertThat(rounding.round(time("2016-03-27T01:41:00+01:00")), isDate(time("2016-03-27T01:30:00+01:00"), tz)); - assertThat(rounding.round(time("2016-03-27T01:51:00+01:00")), isDate(time("2016-03-27T01:44:00+01:00"), tz)); - assertThat(rounding.round(time("2016-03-27T01:59:00+01:00")), isDate(time("2016-03-27T01:58:00+01:00"), tz)); - assertThat(rounding.round(time("2016-03-27T03:05:00+02:00")), isDate(time("2016-03-27T03:00:00+02:00"), tz)); - assertThat(rounding.round(time("2016-03-27T03:12:00+02:00")), isDate(time("2016-03-27T03:08:00+02:00"), tz)); - assertThat(rounding.round(time("2016-03-27T03:25:00+02:00")), isDate(time("2016-03-27T03:22:00+02:00"), tz)); - assertThat(rounding.round(time("2016-03-27T03:39:00+02:00")), isDate(time("2016-03-27T03:36:00+02:00"), tz)); - } - - /** - * Test for half day rounding intervals scrossing DST. 
- */ - public void testIntervalRounding_HalfDay_DST() { - DateTimeZone tz = DateTimeZone.forID("CET"); - long interval = TimeUnit.HOURS.toMillis(12); - Rounding rounding = new Rounding.TimeIntervalRounding(interval, tz); - - assertThat(rounding.round(time("2016-03-26T01:00:00+01:00")), isDate(time("2016-03-26T00:00:00+01:00"), tz)); - assertThat(rounding.round(time("2016-03-26T13:00:00+01:00")), isDate(time("2016-03-26T12:00:00+01:00"), tz)); - assertThat(rounding.round(time("2016-03-27T01:00:00+01:00")), isDate(time("2016-03-27T00:00:00+01:00"), tz)); - assertThat(rounding.round(time("2016-03-27T13:00:00+02:00")), isDate(time("2016-03-27T12:00:00+02:00"), tz)); - assertThat(rounding.round(time("2016-03-28T01:00:00+02:00")), isDate(time("2016-03-28T00:00:00+02:00"), tz)); - assertThat(rounding.round(time("2016-03-28T13:00:00+02:00")), isDate(time("2016-03-28T12:00:00+02:00"), tz)); - } - - /** - * randomized test on {@link TimeIntervalRounding} with random interval and time zone offsets - */ - public void testIntervalRoundingRandom() { - for (int i = 0; i < 1000; i++) { - TimeUnit unit = randomFrom(new TimeUnit[] {TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS}); - long interval = unit.toMillis(randomIntBetween(1, 365)); - DateTimeZone tz = randomDateTimeZone(); - Rounding rounding = new Rounding.TimeIntervalRounding(interval, tz); - long mainDate = Math.abs(randomLong() % (2 * (long) 10e11)); // 1970-01-01T00:00:00Z - 2033-05-18T05:33:20.000+02:00 - if (randomBoolean()) { - mainDate = nastyDate(mainDate, tz, interval); - } - // check two intervals around date - long previousRoundedValue = Long.MIN_VALUE; - for (long date = mainDate - 2 * interval; date < mainDate + 2 * interval; date += interval / 2) { - try { - final long roundedDate = rounding.round(date); - final long nextRoundingValue = rounding.nextRoundingValue(roundedDate); - assertThat("Rounding should be idempotent", roundedDate, equalTo(rounding.round(roundedDate))); - assertThat("Rounded value smaller 
or equal than unrounded", roundedDate, lessThanOrEqualTo(date)); - assertThat("Values smaller than rounded value should round further down", rounding.round(roundedDate - 1), - lessThan(roundedDate)); - assertThat("Rounding should be >= previous rounding value", roundedDate, greaterThanOrEqualTo(previousRoundedValue)); - - if (tz.isFixed()) { - assertThat("NextRounding value should be greater than date", nextRoundingValue, greaterThan(roundedDate)); - assertThat("NextRounding value should be interval from rounded value", nextRoundingValue - roundedDate, - equalTo(interval)); - assertThat("NextRounding value should be a rounded date", nextRoundingValue, - equalTo(rounding.round(nextRoundingValue))); - } - previousRoundedValue = roundedDate; - } catch (AssertionError e) { - logger.error("Rounding error at {}, timezone {}, interval: {},", new DateTime(date, tz), tz, interval); - throw e; - } - } - } - } - - /** - * Test that rounded values are always greater or equal to last rounded value if date is increasing. 
- * The example covers an interval around 2011-10-30T02:10:00+01:00, time zone CET, interval: 2700000ms - */ - public void testIntervalRoundingMonotonic_CET() { - long interval = TimeUnit.MINUTES.toMillis(45); - DateTimeZone tz = DateTimeZone.forID("CET"); - Rounding rounding = new Rounding.TimeIntervalRounding(interval, tz); - List> expectedDates = new ArrayList<>(); - // first date is the date to be rounded, second the expected result - expectedDates.add(new Tuple<>("2011-10-30T01:40:00.000+02:00", "2011-10-30T01:30:00.000+02:00")); - expectedDates.add(new Tuple<>("2011-10-30T02:02:30.000+02:00", "2011-10-30T01:30:00.000+02:00")); - expectedDates.add(new Tuple<>("2011-10-30T02:25:00.000+02:00", "2011-10-30T02:15:00.000+02:00")); - expectedDates.add(new Tuple<>("2011-10-30T02:47:30.000+02:00", "2011-10-30T02:15:00.000+02:00")); - expectedDates.add(new Tuple<>("2011-10-30T02:10:00.000+01:00", "2011-10-30T02:15:00.000+02:00")); - expectedDates.add(new Tuple<>("2011-10-30T02:32:30.000+01:00", "2011-10-30T02:15:00.000+01:00")); - expectedDates.add(new Tuple<>("2011-10-30T02:55:00.000+01:00", "2011-10-30T02:15:00.000+01:00")); - expectedDates.add(new Tuple<>("2011-10-30T03:17:30.000+01:00", "2011-10-30T03:00:00.000+01:00")); - - long previousDate = Long.MIN_VALUE; - for (Tuple dates : expectedDates) { - final long roundedDate = rounding.round(time(dates.v1())); - assertThat(roundedDate, isDate(time(dates.v2()), tz)); - assertThat(roundedDate, greaterThanOrEqualTo(previousDate)); - previousDate = roundedDate; - } - // here's what this means for interval widths - assertEquals(TimeUnit.MINUTES.toMillis(45), time("2011-10-30T02:15:00.000+02:00") - time("2011-10-30T01:30:00.000+02:00")); - assertEquals(TimeUnit.MINUTES.toMillis(60), time("2011-10-30T02:15:00.000+01:00") - time("2011-10-30T02:15:00.000+02:00")); - assertEquals(TimeUnit.MINUTES.toMillis(45), time("2011-10-30T03:00:00.000+01:00") - time("2011-10-30T02:15:00.000+01:00")); - } - - /** - * special test for DST 
switch from #9491 - */ - public void testAmbiguousHoursAfterDSTSwitch() { - Rounding tzRounding; - final DateTimeZone tz = DateTimeZone.forID("Asia/Jerusalem"); - tzRounding = Rounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(tz).build(); - assertThat(tzRounding.round(time("2014-10-26T00:30:00+03:00")), isDate(time("2014-10-26T00:00:00+03:00"), tz)); - assertThat(tzRounding.round(time("2014-10-26T01:30:00+03:00")), isDate(time("2014-10-26T01:00:00+03:00"), tz)); - // the utc date for "2014-10-25T03:00:00+03:00" and "2014-10-25T03:00:00+02:00" is the same, local time turns back 1h here - assertThat(time("2014-10-26T03:00:00+03:00"), isDate(time("2014-10-26T02:00:00+02:00"), tz)); - assertThat(tzRounding.round(time("2014-10-26T01:30:00+02:00")), isDate(time("2014-10-26T01:00:00+02:00"), tz)); - assertThat(tzRounding.round(time("2014-10-26T02:30:00+02:00")), isDate(time("2014-10-26T02:00:00+02:00"), tz)); - - // Day interval - tzRounding = Rounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(tz).build(); - assertThat(tzRounding.round(time("2014-11-11T17:00:00", tz)), isDate(time("2014-11-11T00:00:00", tz), tz)); - // DST on - assertThat(tzRounding.round(time("2014-08-11T17:00:00", tz)), isDate(time("2014-08-11T00:00:00", tz), tz)); - // Day of switching DST on -> off - assertThat(tzRounding.round(time("2014-10-26T17:00:00", tz)), isDate(time("2014-10-26T00:00:00", tz), tz)); - // Day of switching DST off -> on - assertThat(tzRounding.round(time("2015-03-27T17:00:00", tz)), isDate(time("2015-03-27T00:00:00", tz), tz)); - - // Month interval - tzRounding = Rounding.builder(DateTimeUnit.MONTH_OF_YEAR).timeZone(tz).build(); - assertThat(tzRounding.round(time("2014-11-11T17:00:00", tz)), isDate(time("2014-11-01T00:00:00", tz), tz)); - // DST on - assertThat(tzRounding.round(time("2014-10-10T17:00:00", tz)), isDate(time("2014-10-01T00:00:00", tz), tz)); - - // Year interval - tzRounding = Rounding.builder(DateTimeUnit.YEAR_OF_CENTURY).timeZone(tz).build(); - 
assertThat(tzRounding.round(time("2014-11-11T17:00:00", tz)), isDate(time("2014-01-01T00:00:00", tz), tz)); - - // Two timestamps in same year and different timezone offset ("Double buckets" issue - #9491) - tzRounding = Rounding.builder(DateTimeUnit.YEAR_OF_CENTURY).timeZone(tz).build(); - assertThat(tzRounding.round(time("2014-11-11T17:00:00", tz)), - isDate(tzRounding.round(time("2014-08-11T17:00:00", tz)), tz)); - } - - /** - * test for #10025, strict local to UTC conversion can cause joda exceptions - * on DST start - */ - public void testLenientConversionDST() { - DateTimeZone tz = DateTimeZone.forID("America/Sao_Paulo"); - long start = time("2014-10-18T20:50:00.000", tz); - long end = time("2014-10-19T01:00:00.000", tz); - Rounding tzRounding = new Rounding.TimeUnitRounding(DateTimeUnit.MINUTES_OF_HOUR, tz); - Rounding dayTzRounding = new Rounding.TimeIntervalRounding(60000, tz); - for (long time = start; time < end; time = time + 60000) { - assertThat(tzRounding.nextRoundingValue(time), greaterThan(time)); - assertThat(dayTzRounding.nextRoundingValue(time), greaterThan(time)); - } - } - - public void testEdgeCasesTransition() { - { - // standard +/-1 hour DST transition, CET - DateTimeUnit timeUnit = DateTimeUnit.HOUR_OF_DAY; - DateTimeZone tz = DateTimeZone.forID("CET"); - Rounding rounding = new Rounding.TimeUnitRounding(timeUnit, tz); - - // 29 Mar 2015 - Daylight Saving Time Started - // at 02:00:00 clocks were turned forward 1 hour to 03:00:00 - assertInterval(time("2015-03-29T00:00:00.000+01:00"), time("2015-03-29T01:00:00.000+01:00"), rounding, 60, tz); - assertInterval(time("2015-03-29T01:00:00.000+01:00"), time("2015-03-29T03:00:00.000+02:00"), rounding, 60, tz); - assertInterval(time("2015-03-29T03:00:00.000+02:00"), time("2015-03-29T04:00:00.000+02:00"), rounding, 60, tz); - - // 25 Oct 2015 - Daylight Saving Time Ended - // at 03:00:00 clocks were turned backward 1 hour to 02:00:00 - assertInterval(time("2015-10-25T01:00:00.000+02:00"), 
time("2015-10-25T02:00:00.000+02:00"), rounding, 60, tz); - assertInterval(time("2015-10-25T02:00:00.000+02:00"), time("2015-10-25T02:00:00.000+01:00"), rounding, 60, tz); - assertInterval(time("2015-10-25T02:00:00.000+01:00"), time("2015-10-25T03:00:00.000+01:00"), rounding, 60, tz); - } - - { - // time zone "Asia/Kathmandu" - // 1 Jan 1986 - Time Zone Change (IST → NPT), at 00:00:00 clocks were turned forward 00:15 minutes - // - // hour rounding is stable before 1985-12-31T23:00:00.000 and after 1986-01-01T01:00:00.000+05:45 - // the interval between is 105 minutes long because the hour after transition starts at 00:15 - // which is not a round value for hourly rounding - DateTimeUnit timeUnit = DateTimeUnit.HOUR_OF_DAY; - DateTimeZone tz = DateTimeZone.forID("Asia/Kathmandu"); - Rounding rounding = new Rounding.TimeUnitRounding(timeUnit, tz); - - assertInterval(time("1985-12-31T22:00:00.000+05:30"), time("1985-12-31T23:00:00.000+05:30"), rounding, 60, tz); - assertInterval(time("1985-12-31T23:00:00.000+05:30"), time("1986-01-01T01:00:00.000+05:45"), rounding, 105, tz); - assertInterval(time("1986-01-01T01:00:00.000+05:45"), time("1986-01-01T02:00:00.000+05:45"), rounding, 60, tz); - } - - { - // time zone "Australia/Lord_Howe" - // 3 Mar 1991 - Daylight Saving Time Ended - // at 02:00:00 clocks were turned backward 0:30 hours to Sunday, 3 March 1991, 01:30:00 - DateTimeUnit timeUnit = DateTimeUnit.HOUR_OF_DAY; - DateTimeZone tz = DateTimeZone.forID("Australia/Lord_Howe"); - Rounding rounding = new Rounding.TimeUnitRounding(timeUnit, tz); - - assertInterval(time("1991-03-03T00:00:00.000+11:00"), time("1991-03-03T01:00:00.000+11:00"), rounding, 60, tz); - assertInterval(time("1991-03-03T01:00:00.000+11:00"), time("1991-03-03T02:00:00.000+10:30"), rounding, 90, tz); - assertInterval(time("1991-03-03T02:00:00.000+10:30"), time("1991-03-03T03:00:00.000+10:30"), rounding, 60, tz); - - // 27 Oct 1991 - Daylight Saving Time Started - // at 02:00:00 clocks were turned 
forward 0:30 hours to 02:30:00 - assertInterval(time("1991-10-27T00:00:00.000+10:30"), time("1991-10-27T01:00:00.000+10:30"), rounding, 60, tz); - // the interval containing the switch time is 90 minutes long - assertInterval(time("1991-10-27T01:00:00.000+10:30"), time("1991-10-27T03:00:00.000+11:00"), rounding, 90, tz); - assertInterval(time("1991-10-27T03:00:00.000+11:00"), time("1991-10-27T04:00:00.000+11:00"), rounding, 60, tz); - } - - { - // time zone "Pacific/Chatham" - // 5 Apr 2015 - Daylight Saving Time Ended - // at 03:45:00 clocks were turned backward 1 hour to 02:45:00 - DateTimeUnit timeUnit = DateTimeUnit.HOUR_OF_DAY; - DateTimeZone tz = DateTimeZone.forID("Pacific/Chatham"); - Rounding rounding = new Rounding.TimeUnitRounding(timeUnit, tz); - - assertInterval(time("2015-04-05T02:00:00.000+13:45"), time("2015-04-05T03:00:00.000+13:45"), rounding, 60, tz); - assertInterval(time("2015-04-05T03:00:00.000+13:45"), time("2015-04-05T03:00:00.000+12:45"), rounding, 60, tz); - assertInterval(time("2015-04-05T03:00:00.000+12:45"), time("2015-04-05T04:00:00.000+12:45"), rounding, 60, tz); - - // 27 Sep 2015 - Daylight Saving Time Started - // at 02:45:00 clocks were turned forward 1 hour to 03:45:00 - - assertInterval(time("2015-09-27T01:00:00.000+12:45"), time("2015-09-27T02:00:00.000+12:45"), rounding, 60, tz); - assertInterval(time("2015-09-27T02:00:00.000+12:45"), time("2015-09-27T04:00:00.000+13:45"), rounding, 60, tz); - assertInterval(time("2015-09-27T04:00:00.000+13:45"), time("2015-09-27T05:00:00.000+13:45"), rounding, 60, tz); - } - } - - public void testDST_Europe_Rome() { - // time zone "Europe/Rome", rounding to days. Rome had two midnights on the day the clocks went back in 1978, and - // timeZone.convertLocalToUTC() gives the later of the two because Rome is east of UTC, whereas we want the earlier. 
- - DateTimeUnit timeUnit = DateTimeUnit.DAY_OF_MONTH; - DateTimeZone tz = DateTimeZone.forID("Europe/Rome"); - Rounding rounding = new TimeUnitRounding(timeUnit, tz); - - { - long timeBeforeFirstMidnight = time("1978-09-30T23:59:00+02:00"); - long floor = rounding.round(timeBeforeFirstMidnight); - assertThat(floor, isDate(time("1978-09-30T00:00:00+02:00"), tz)); - } - - { - long timeBetweenMidnights = time("1978-10-01T00:30:00+02:00"); - long floor = rounding.round(timeBetweenMidnights); - assertThat(floor, isDate(time("1978-10-01T00:00:00+02:00"), tz)); - } - - { - long timeAfterSecondMidnight = time("1978-10-01T00:30:00+01:00"); - long floor = rounding.round(timeAfterSecondMidnight); - assertThat(floor, isDate(time("1978-10-01T00:00:00+02:00"), tz)); - - long prevFloor = rounding.round(floor - 1); - assertThat(prevFloor, lessThan(floor)); - assertThat(prevFloor, isDate(time("1978-09-30T00:00:00+02:00"), tz)); - } - } - - /** - * Test for a time zone whose days overlap because the clocks are set back across midnight at the end of DST. - */ - public void testDST_America_St_Johns() { - // time zone "America/St_Johns", rounding to days. - DateTimeUnit timeUnit = DateTimeUnit.DAY_OF_MONTH; - DateTimeZone tz = DateTimeZone.forID("America/St_Johns"); - Rounding rounding = new TimeUnitRounding(timeUnit, tz); - - // 29 October 2006 - Daylight Saving Time ended, changing the UTC offset from -02:30 to -03:30. - // This happened at 02:31 UTC, 00:01 local time, so the clocks were set back 1 hour to 23:01 on the 28th. - // This means that 2006-10-29 has _two_ midnights, one in the -02:30 offset and one in the -03:30 offset. - // Only the first of these is considered "rounded". Moreover, the extra time between 23:01 and 23:59 - // should be considered as part of the 28th even though it comes after midnight on the 29th. - - { - // Times before the first midnight should be rounded up to the first midnight. 
- long timeBeforeFirstMidnight = time("2006-10-28T23:30:00.000-02:30"); - long floor = rounding.round(timeBeforeFirstMidnight); - assertThat(floor, isDate(time("2006-10-28T00:00:00.000-02:30"), tz)); - long ceiling = rounding.nextRoundingValue(timeBeforeFirstMidnight); - assertThat(ceiling, isDate(time("2006-10-29T00:00:00.000-02:30"), tz)); - assertInterval(floor, timeBeforeFirstMidnight, ceiling, rounding, tz); - } - - { - // Times between the two midnights which are on the later day should be rounded down to the later day's midnight. - long timeBetweenMidnights = time("2006-10-29T00:00:30.000-02:30"); - // (this is halfway through the last minute before the clocks changed, in which local time was ambiguous) - - long floor = rounding.round(timeBetweenMidnights); - assertThat(floor, isDate(time("2006-10-29T00:00:00.000-02:30"), tz)); - - long ceiling = rounding.nextRoundingValue(timeBetweenMidnights); - assertThat(ceiling, isDate(time("2006-10-30T00:00:00.000-03:30"), tz)); - - assertInterval(floor, timeBetweenMidnights, ceiling, rounding, tz); - } - - { - // Times between the two midnights which are on the earlier day should be rounded down to the earlier day's midnight. - long timeBetweenMidnights = time("2006-10-28T23:30:00.000-03:30"); - // (this is halfway through the hour after the clocks changed, in which local time was ambiguous) - - long floor = rounding.round(timeBetweenMidnights); - assertThat(floor, isDate(time("2006-10-28T00:00:00.000-02:30"), tz)); - - long ceiling = rounding.nextRoundingValue(timeBetweenMidnights); - assertThat(ceiling, isDate(time("2006-10-29T00:00:00.000-02:30"), tz)); - - assertInterval(floor, timeBetweenMidnights, ceiling, rounding, tz); - } - - { - // Times after the second midnight should be rounded down to the first midnight. 
- long timeAfterSecondMidnight = time("2006-10-29T06:00:00.000-03:30"); - long floor = rounding.round(timeAfterSecondMidnight); - assertThat(floor, isDate(time("2006-10-29T00:00:00.000-02:30"), tz)); - long ceiling = rounding.nextRoundingValue(timeAfterSecondMidnight); - assertThat(ceiling, isDate(time("2006-10-30T00:00:00.000-03:30"), tz)); - assertInterval(floor, timeAfterSecondMidnight, ceiling, rounding, tz); - } - } - - /** - * tests for dst transition with overlaps and day roundings. - */ - public void testDST_END_Edgecases() { - // First case, dst happens at 1am local time, switching back one hour. - // We want the overlapping hour to count for the next day, making it a 25h interval - - DateTimeUnit timeUnit = DateTimeUnit.DAY_OF_MONTH; - DateTimeZone tz = DateTimeZone.forID("Atlantic/Azores"); - Rounding rounding = new Rounding.TimeUnitRounding(timeUnit, tz); - - // Sunday, 29 October 2000, 01:00:00 clocks were turned backward 1 hour - // to Sunday, 29 October 2000, 00:00:00 local standard time instead - // which means there were two midnights that day. - - long midnightBeforeTransition = time("2000-10-29T00:00:00", tz); - long midnightOfTransition = time("2000-10-29T00:00:00-01:00"); - assertEquals(60L * 60L * 1000L, midnightOfTransition - midnightBeforeTransition); - long nextMidnight = time("2000-10-30T00:00:00", tz); - - assertInterval(midnightBeforeTransition, nextMidnight, rounding, 25 * 60, tz); - - assertThat(rounding.round(time("2000-10-29T06:00:00-01:00")), isDate(time("2000-10-29T00:00:00Z"), tz)); - - // Second case, dst happens at 0am local time, switching back one hour to 23pm local time. 
- // We want the overlapping hour to count for the previous day here - - tz = DateTimeZone.forID("America/Lima"); - rounding = new Rounding.TimeUnitRounding(timeUnit, tz); - - // Sunday, 1 April 1990, 00:00:00 clocks were turned backward 1 hour to - // Saturday, 31 March 1990, 23:00:00 local standard time instead - - midnightBeforeTransition = time("1990-03-31T00:00:00.000-04:00"); - nextMidnight = time("1990-04-01T00:00:00.000-05:00"); - assertInterval(midnightBeforeTransition, nextMidnight, rounding, 25 * 60, tz); - - // make sure the next interval is 24h long again - long midnightAfterTransition = time("1990-04-01T00:00:00.000-05:00"); - nextMidnight = time("1990-04-02T00:00:00.000-05:00"); - assertInterval(midnightAfterTransition, nextMidnight, rounding, 24 * 60, tz); - } - - /** - * Test that time zones are correctly parsed. There is a bug with - * Joda 2.9.4 (see https://github.com/JodaOrg/joda-time/issues/373) - */ - public void testsTimeZoneParsing() { - final DateTime expected = new DateTime(2016, 11, 10, 5, 37, 59, randomDateTimeZone()); - - // Formatter used to print and parse the sample date. 
- // Printing the date works but parsing it back fails - // with Joda 2.9.4 - DateTimeFormatter formatter = DateTimeFormat.forPattern("YYYY-MM-dd'T'HH:mm:ss " + randomFrom("ZZZ", "[ZZZ]", "'['ZZZ']'")); - - String dateTimeAsString = formatter.print(expected); - assertThat(dateTimeAsString, startsWith("2016-11-10T05:37:59 ")); - - DateTime parsedDateTime = formatter.parseDateTime(dateTimeAsString); - assertThat(parsedDateTime.getZone(), equalTo(expected.getZone())); - } - - private static void assertInterval(long rounded, long nextRoundingValue, Rounding rounding, int minutes, - DateTimeZone tz) { - assertInterval(rounded, dateBetween(rounded, nextRoundingValue), nextRoundingValue, rounding, tz); - assertEquals(DateTimeConstants.MILLIS_PER_MINUTE * minutes, nextRoundingValue - rounded); - } - - /** - * perform a number on assertions and checks on {@link TimeUnitRounding} intervals - * @param rounded the expected low end of the rounding interval - * @param unrounded a date in the interval to be checked for rounding - * @param nextRoundingValue the expected upper end of the rounding interval - * @param rounding the rounding instance - */ - private static void assertInterval(long rounded, long unrounded, long nextRoundingValue, Rounding rounding, DateTimeZone tz) { - assertThat("rounding should be idempotent ", rounding.round(rounded), isDate(rounded, tz)); - assertThat("rounded value smaller or equal than unrounded" + rounding, rounded, lessThanOrEqualTo(unrounded)); - assertThat("values less than rounded should round further down" + rounding, rounding.round(rounded - 1), lessThan(rounded)); - assertThat("nextRounding value should be a rounded date", rounding.round(nextRoundingValue), isDate(nextRoundingValue, tz)); - assertThat("values above nextRounding should round down there", rounding.round(nextRoundingValue + 1), - isDate(nextRoundingValue, tz)); - - if (isTimeWithWellDefinedRounding(tz, unrounded)) { - assertThat("nextRounding value should be greater than date" 
+ rounding, nextRoundingValue, greaterThan(unrounded)); - - long dateBetween = dateBetween(rounded, nextRoundingValue); - assertThat("dateBetween [" + new DateTime(dateBetween, tz) + "] should round down to roundedDate", - rounding.round(dateBetween), isDate(rounded, tz)); - assertThat("dateBetween [" + new DateTime(dateBetween, tz) + "] should round up to nextRoundingValue", - rounding.nextRoundingValue(dateBetween), isDate(nextRoundingValue, tz)); - } - } - - private static boolean isTimeWithWellDefinedRounding(DateTimeZone tz, long t) { - if (tz.getID().equals("America/St_Johns") - || tz.getID().equals("America/Goose_Bay") - || tz.getID().equals("America/Moncton") - || tz.getID().equals("Canada/Newfoundland")) { - - // Clocks went back at 00:01 between 1987 and 2010, causing overlapping days. - // These timezones are otherwise uninteresting, so just skip this period. - - return t <= time("1987-10-01T00:00:00Z") - || t >= time("2010-12-01T00:00:00Z"); - } - - if (tz.getID().equals("Antarctica/Casey")) { - - // Clocks went back 3 hours at 02:00 on 2010-03-05, causing overlapping days. 
- - return t <= time("2010-03-03T00:00:00Z") - || t >= time("2010-03-07T00:00:00Z"); - } - - return true; - } - - private static long dateBetween(long lower, long upper) { - long dateBetween = randomLongBetween(lower, upper - 1); - assert lower <= dateBetween && dateBetween < upper; - return dateBetween; - } - - private static DateTimeUnit randomTimeUnit() { - byte id = (byte) randomIntBetween(1, 8); - return DateTimeUnit.resolve(id); - } - - private static long time(String time) { - return time(time, DateTimeZone.UTC); - } - - private static long time(String time, DateTimeZone zone) { - return ISODateTimeFormat.dateOptionalTimeParser().withZone(zone).parseMillis(time); - } - - private static Matcher isDate(final long expected, DateTimeZone tz) { - return new TypeSafeMatcher() { - @Override - public boolean matchesSafely(final Long item) { - return expected == item.longValue(); - } - - @Override - public void describeTo(Description description) { - description.appendText(new DateTime(expected, tz) + " [" + expected + "] "); - } - - @Override - protected void describeMismatchSafely(final Long actual, final Description mismatchDescription) { - mismatchDescription.appendText(" was ").appendValue(new DateTime(actual, tz) + " [" + actual + "]"); - } - }; - } -} diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java index 6904ae36d5439..98953cbaea3e6 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java @@ -19,20 +19,14 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.core.PathUtils; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.DistanceUnit; import 
org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.core.PathUtils; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matcher; import org.hamcrest.Matchers; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.Instant; -import org.joda.time.ReadableInstant; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -53,14 +47,15 @@ import java.time.Year; import java.time.ZoneOffset; import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; import java.util.Date; +import java.util.GregorianCalendar; import java.util.HashMap; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -402,78 +397,21 @@ public void testText() throws Exception { } } - public void testReadableInstant() throws Exception { - assertResult("{'instant':null}", () -> builder().startObject().timeField("instant", (ReadableInstant) null).endObject()); - assertResult("{'instant':null}", () -> builder().startObject().field("instant").timeValue((ReadableInstant) null).endObject()); - - final DateTime t1 = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC); - - String expected = "{'t1':'2016-01-01T00:00:00.000Z'}"; - assertResult(expected, () -> builder().startObject().timeField("t1", t1).endObject()); - assertResult(expected, () -> builder().startObject().field("t1").timeValue(t1).endObject()); - - final DateTime t2 = new DateTime(2016, 12, 25, 7, 59, 42, 213, DateTimeZone.UTC); - - expected = "{'t2':'2016-12-25T07:59:42.213Z'}"; - assertResult(expected, () -> builder().startObject().timeField("t2", t2).endObject()); - 
assertResult(expected, () -> builder().startObject().field("t2").timeValue(t2).endObject()); - - final DateTimeFormatter formatter = randomFrom(ISODateTimeFormat.basicDate(), ISODateTimeFormat.dateTimeNoMillis()); - final DateTime t3 = DateTime.now(); - - expected = "{'t3':'" + formatter.print(t3) + "'}"; - assertResult(expected, () -> builder().startObject().timeField("t3", formatter.print(t3)).endObject()); - assertResult(expected, () -> builder().startObject().field("t3").value(formatter.print(t3)).endObject()); - - final DateTime t4 = new DateTime(randomDateTimeZone()); - - expected = "{'t4':'" + formatter.print(t4) + "'}"; - assertResult(expected, () -> builder().startObject().timeField("t4", formatter.print(t4)).endObject()); - assertResult(expected, () -> builder().startObject().field("t4").value(formatter.print(t4)).endObject()); - - long date = Math.abs(randomLong() % (2 * (long) 10e11)); // 1970-01-01T00:00:00Z - 2033-05-18T05:33:20.000+02:00 - final DateTime t5 = new DateTime(date, randomDateTimeZone()); - - expected = "{'t5':'" + XContentElasticsearchExtension.DEFAULT_DATE_PRINTER.print(t5) + "'}"; - assertResult(expected, () -> builder().startObject().timeField("t5", t5).endObject()); - assertResult(expected, () -> builder().startObject().field("t5").timeValue(t5).endObject()); - - expected = "{'t5':'" + formatter.print(t5) + "'}"; - assertResult(expected, () -> builder().startObject().timeField("t5", formatter.print(t5)).endObject()); - assertResult(expected, () -> builder().startObject().field("t5").value(formatter.print(t5)).endObject()); - - Instant i1 = new Instant(1451606400000L); // 2016-01-01T00:00:00.000Z - expected = "{'i1':'2016-01-01T00:00:00.000Z'}"; - assertResult(expected, () -> builder().startObject().timeField("i1", i1).endObject()); - assertResult(expected, () -> builder().startObject().field("i1").timeValue(i1).endObject()); - - Instant i2 = new Instant(1482652782213L); // 2016-12-25T07:59:42.213Z - expected = "{'i2':'" + 
formatter.print(i2) + "'}"; - assertResult(expected, () -> builder().startObject().timeField("i2", formatter.print(i2)).endObject()); - assertResult(expected, () -> builder().startObject().field("i2").value(formatter.print(i2)).endObject()); - } - public void testDate() throws Exception { assertResult("{'date':null}", () -> builder().startObject().timeField("date", (Date) null).endObject()); assertResult("{'date':null}", () -> builder().startObject().field("date").timeValue((Date) null).endObject()); - final Date d1 = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC).toDate(); + final Date d1 = Date.from(ZonedDateTime.of(2016, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant()); assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().timeField("d1", d1).endObject()); assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().field("d1").timeValue(d1).endObject()); - final Date d2 = new DateTime(2016, 12, 25, 7, 59, 42, 213, DateTimeZone.UTC).toDate(); + final Date d2 = Date.from(ZonedDateTime.of(2016, 12, 25, 7, 59, 42, 213000000, ZoneOffset.UTC).toInstant()); assertResult("{'d2':'2016-12-25T07:59:42.213Z'}", () -> builder().startObject().timeField("d2", d2).endObject()); assertResult("{'d2':'2016-12-25T07:59:42.213Z'}", () -> builder().startObject().field("d2").timeValue(d2).endObject()); - - final DateTimeFormatter formatter = randomFrom(ISODateTimeFormat.basicDate(), ISODateTimeFormat.dateTimeNoMillis()); - final Date d3 = DateTime.now().toDate(); - - String expected = "{'d3':'" + formatter.print(d3.getTime()) + "'}"; - assertResult(expected, () -> builder().startObject().field("d3").value(formatter.print(d3.getTime())).endObject()); } public void testDateField() throws Exception { - final Date d = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC).toDate(); + final Date d = Date.from(ZonedDateTime.of(2016, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant()); assertResult("{'date_in_millis':1451606400000}", () -> builder() 
.startObject() @@ -487,7 +425,7 @@ public void testDateField() throws Exception { } public void testCalendar() throws Exception { - Calendar calendar = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC).toCalendar(Locale.ROOT); + Calendar calendar = GregorianCalendar.from(ZonedDateTime.of(2016, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)); assertResult("{'calendar':'2016-01-01T00:00:00.000Z'}", () -> builder() .startObject() .field("calendar") @@ -653,17 +591,13 @@ public void testObjects() throws Exception { final String paths = Constants.WINDOWS ? "{'objects':['a\\\\b\\\\c','d\\\\e']}" : "{'objects':['a/b/c','d/e']}"; objects.put(paths, new Object[]{PathUtils.get("a", "b", "c"), PathUtils.get("d", "e")}); - final DateTimeFormatter formatter = XContentElasticsearchExtension.DEFAULT_DATE_PRINTER; - final Date d1 = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC).toDate(); - final Date d2 = new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC).toDate(); - objects.put("{'objects':['" + formatter.print(d1.getTime()) + "','" + formatter.print(d2.getTime()) + "']}", new Object[]{d1, d2}); + final DateTimeFormatter formatter = DateTimeFormatter.ISO_INSTANT; + final Date d1 = Date.from(ZonedDateTime.of(2016, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant()); + final Date d2 = Date.from(ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant()); + objects.put("{'objects':['2016-01-01T00:00:00.000Z','2015-01-01T00:00:00.000Z']}", new Object[]{d1, d2}); - final DateTime dt1 = DateTime.now(); - final DateTime dt2 = new DateTime(2016, 12, 25, 7, 59, 42, 213, DateTimeZone.UTC); - objects.put("{'objects':['" + formatter.print(dt1) + "','2016-12-25T07:59:42.213Z']}", new Object[]{dt1, dt2}); - - final Calendar c1 = new DateTime(2012, 7, 7, 10, 23, DateTimeZone.UTC).toCalendar(Locale.ROOT); - final Calendar c2 = new DateTime(2014, 11, 16, 19, 36, DateTimeZone.UTC).toCalendar(Locale.ROOT); + final Calendar c1 = GregorianCalendar.from(ZonedDateTime.of(2012, 7, 7, 10, 23, 0, 0, 
ZoneOffset.UTC)); + final Calendar c2 = GregorianCalendar.from(ZonedDateTime.of(2014, 11, 16, 19, 36, 0, 0, ZoneOffset.UTC)); objects.put("{'objects':['2012-07-07T10:23:00.000Z','2014-11-16T19:36:00.000Z']}", new Object[]{c1, c2}); final ToXContent x1 = (builder, params) -> builder.startObject().field("f1", "v1").field("f2", 2).array("f3", 3, 4, 5).endObject(); @@ -701,14 +635,10 @@ public void testObject() throws Exception { final String path = Constants.WINDOWS ? "{'object':'a\\\\b\\\\c'}" : "{'object':'a/b/c'}"; object.put(path, PathUtils.get("a", "b", "c")); - final DateTimeFormatter formatter = XContentElasticsearchExtension.DEFAULT_DATE_PRINTER; - final Date d1 = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC).toDate(); - object.put("{'object':'" + formatter.print(d1.getTime()) + "'}", d1); - - final DateTime d2 = DateTime.now(); - object.put("{'object':'" + formatter.print(d2) + "'}", d2); + final Date d1 = Date.from(ZonedDateTime.of(2016, 1, 1, 0, 0, 0,0, ZoneOffset.UTC).toInstant()); + object.put("{'object':'" + "2016-01-01T00:00:00.000Z" + "'}", d1); - final Calendar c1 = new DateTime(2010, 1, 1, 0, 0, DateTimeZone.UTC).toCalendar(Locale.ROOT); + final Calendar c1 = GregorianCalendar.from(ZonedDateTime.of(2010, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)); object.put("{'object':'2010-01-01T00:00:00.000Z'}", c1); final ToXContent x1 = (builder, params) -> builder.startObject().field("f1", "v1").field("f2", 2).array("f3", 3, 4, 5).endObject(); diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index 5cf9c24aa2529..8870b4f60ea8c 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -182,7 +182,8 @@ public void testResolveIndexFolders() throws Exception { } for (Map.Entry> actualIndexDataPathEntry : actualIndexDataPaths.entrySet()) { List actual = 
actualIndexDataPathEntry.getValue(); - assertThat(actual.get(0), equalTo(env.resolveIndexFolder(actualIndexDataPathEntry.getKey()))); + Path[] actualPaths = actual.toArray(new Path[actual.size()]); + assertThat(actualPaths, equalTo(env.resolveIndexFolder(actualIndexDataPathEntry.getKey()))); } assertTrue("LockedShards: " + env.lockedShards(), env.lockedShards().isEmpty()); env.close(); diff --git a/server/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java b/server/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java index 99534d8403bc9..ec9553c415df3 100644 --- a/server/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java @@ -69,9 +69,10 @@ public void testInvalidIndexFolder() throws Exception { final Settings.Builder settings = Settings.builder().put(indexSettings).put(IndexMetadata.SETTING_INDEX_UUID, uuid); IndexMetadata dangledIndex = IndexMetadata.builder("test1").settings(settings).build(); metaStateService.writeIndex("test_write", dangledIndex); - Path path = env.resolveIndexFolder(uuid); - if (Files.exists(path)) { - Files.move(path, path.resolveSibling("invalidUUID"), StandardCopyOption.ATOMIC_MOVE); + for (Path path : env.resolveIndexFolder(uuid)) { + if (Files.exists(path)) { + Files.move(path, path.resolveSibling("invalidUUID"), StandardCopyOption.ATOMIC_MOVE); + } } final IllegalStateException e = expectThrows(IllegalStateException.class, danglingState::getDanglingIndices); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index 72ae9f6976d84..c1b971b3deb92 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -38,10 +38,10 @@ import 
org.elasticsearch.index.query.DateRangeIncludingNowQuery; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.SearchExecutionContext; -import org.joda.time.DateTimeZone; import java.io.IOException; import java.time.Instant; +import java.time.ZoneId; import java.time.ZoneOffset; import java.util.Collections; import java.util.List; @@ -85,8 +85,8 @@ public void isFieldWithinRangeTestCase(DateFieldType ft) throws IOException { DateMathParser alternateFormat = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser(); doTestIsFieldWithinQuery(ft, reader, null, null); doTestIsFieldWithinQuery(ft, reader, null, alternateFormat); - doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, null); - doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, alternateFormat); + doTestIsFieldWithinQuery(ft, reader, ZoneOffset.UTC, null); + doTestIsFieldWithinQuery(ft, reader, ZoneOffset.UTC, alternateFormat); QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), writableRegistry(), null, () -> nowInMillis); @@ -99,28 +99,28 @@ public void isFieldWithinRangeTestCase(DateFieldType ft) throws IOException { } private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader, - DateTimeZone zone, DateMathParser alternateFormat) throws IOException { + ZoneId zone, DateMathParser alternateFormat) throws IOException { QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), writableRegistry(), null, () -> nowInMillis); assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", - randomBoolean(), randomBoolean(), null, null, context)); + randomBoolean(), randomBoolean(), zone, null, context)); assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20", - randomBoolean(), randomBoolean(), null, null, context)); + randomBoolean(), randomBoolean(), zone, null, context)); assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, 
"2016-01-02", "2016-02-12", - randomBoolean(), randomBoolean(), null, null, context)); + randomBoolean(), randomBoolean(), zone, null, context)); assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2014-01-02", "2015-02-12", - randomBoolean(), randomBoolean(), null, null, context)); + randomBoolean(), randomBoolean(), zone, null, context)); assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2016-05-11", "2016-08-30", - randomBoolean(), randomBoolean(), null, null, context)); + randomBoolean(), randomBoolean(), zone, null, context)); assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-09-25", "2016-05-29", - randomBoolean(), randomBoolean(), null, null, context)); + randomBoolean(), randomBoolean(), zone, null, context)); assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", - true, true, null, null, context)); + true, true, zone, null, context)); assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", - false, false, null, null, context)); + false, false, zone, null, context)); assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", - false, true, null, null, context)); + false, true, zone, null, context)); assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", - true, false, null, null, context)); + true, false, zone, null, context)); } public void testValueFormat() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java index 9d213292ddb0d..1149eea891459 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -29,14 +29,16 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType; import 
org.elasticsearch.index.mapper.RangeFieldMapper.RangeFieldType; -import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.QueryShardException; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.test.IndexSettingsModule; -import org.joda.time.DateTime; import org.junit.Before; import java.io.IOException; import java.net.InetAddress; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.Collections; import java.util.List; import java.util.Map; @@ -100,8 +102,8 @@ public void testRangeQueryIntersectsAdjacentValues() throws Exception { } case DATE: { long fromValue = randomInt(); - from = new DateTime(fromValue); - to = new DateTime(fromValue + 1); + from = ZonedDateTime.ofInstant(Instant.ofEpochMilli(fromValue), ZoneOffset.UTC); + to = ZonedDateTime.ofInstant(Instant.ofEpochMilli(fromValue + 1), ZoneOffset.UTC); break; } case INTEGER: { @@ -157,8 +159,8 @@ public void testFromLargerToErrors() throws Exception { } case DATE: { long fromValue = randomInt(); - from = new DateTime(fromValue); - to = new DateTime(fromValue - 1); + from = ZonedDateTime.ofInstant(Instant.ofEpochMilli(fromValue), ZoneOffset.UTC); + to = ZonedDateTime.ofInstant(Instant.ofEpochMilli(fromValue - 1), ZoneOffset.UTC); break; } case INTEGER: { @@ -284,7 +286,7 @@ public void testDateVsDateRangeBounds() { private Query getExpectedRangeQuery(ShapeRelation relation, Object from, Object to, boolean includeLower, boolean includeUpper) { switch (type) { case DATE: - return getDateRangeQuery(relation, (DateTime)from, (DateTime)to, includeLower, includeUpper); + return getDateRangeQuery(relation, (ZonedDateTime)from, (ZonedDateTime)to, includeLower, includeUpper); case INTEGER: return getIntRangeQuery(relation, (int)from, (int)to, includeLower, includeUpper); case LONG: @@ -298,9 +300,10 @@ private Query getExpectedRangeQuery(ShapeRelation relation, Object from, Object } } - 
private Query getDateRangeQuery(ShapeRelation relation, DateTime from, DateTime to, boolean includeLower, boolean includeUpper) { - long[] lower = new long[] {from.getMillis() + (includeLower ? 0 : 1)}; - long[] upper = new long[] {to.getMillis() - (includeUpper ? 0 : 1)}; + private Query getDateRangeQuery(ShapeRelation relation, ZonedDateTime from, ZonedDateTime to, + boolean includeLower, boolean includeUpper) { + long[] lower = new long[] {from.toInstant().toEpochMilli() + (includeLower ? 0 : 1)}; + long[] upper = new long[] {to.toInstant().toEpochMilli() - (includeUpper ? 0 : 1)}; Query indexQuery; BinaryDocValuesRangeQuery.QueryType queryType; if (relation == ShapeRelation.WITHIN) { @@ -313,8 +316,8 @@ private Query getDateRangeQuery(ShapeRelation relation, DateTime from, DateTime indexQuery = LongRange.newIntersectsQuery("field", lower, upper); queryType = BinaryDocValuesRangeQuery.QueryType.INTERSECTS; } - Query dvQuery = RangeType.DATE.dvRangeQuery("field", queryType, from.getMillis(), - to.getMillis(), includeLower, includeUpper); + Query dvQuery = RangeType.DATE.dvRangeQuery("field", queryType, from.toInstant().toEpochMilli(), + to.toInstant().toEpochMilli(), includeLower, includeUpper); return new IndexOrDocValuesQuery(indexQuery, dvQuery); } @@ -425,7 +428,7 @@ private Object nextFrom() throws Exception { case INTEGER: return (int)(random().nextInt() * 0.5 - DISTANCE); case DATE: - return DateTime.now(); + return ZonedDateTime.now(ZoneOffset.UTC); case LONG: return (long)(random().nextLong() * 0.5 - DISTANCE); case FLOAT: @@ -442,7 +445,7 @@ private Object nextTo(Object from) throws Exception { case INTEGER: return (Integer)from + DISTANCE; case DATE: - return DateTime.now().plusDays(DISTANCE); + return ZonedDateTime.now(ZoneOffset.UTC).plusDays(DISTANCE); case LONG: return (Long)from + DISTANCE; case DOUBLE: diff --git a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java 
b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java index 958106debfb91..0ef30cc129b7c 100644 --- a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType; import org.elasticsearch.index.query.DistanceFeatureQueryBuilder.Origin; import org.elasticsearch.test.AbstractQueryTestCase; -import org.joda.time.DateTime; import java.io.IOException; import java.time.Instant; @@ -41,7 +40,7 @@ protected DistanceFeatureQueryBuilder doCreateTestQueryBuilder() { break; case DATE_FIELD_NAME: long randomDateMills = randomLongBetween(0, 2_000_000_000_000L); - origin = randomBoolean() ? new Origin(randomDateMills) : new Origin(new DateTime(randomDateMills).toString()); + origin = randomBoolean() ? new Origin(randomDateMills) : new Origin(Instant.ofEpochMilli(randomDateMills).toString()); pivot = randomTimeValue(1, 1000, "d", "h", "ms", "s", "m"); break; default: // DATE_NANOS_FIELD_NAME diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index 96e595c3b6f08..75cc1bfb2b9f7 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -29,8 +29,6 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Relation; import org.elasticsearch.test.AbstractQueryTestCase; -import org.joda.time.DateTime; -import org.joda.time.chrono.ISOChronology; import java.io.IOException; import java.time.Instant; @@ -257,8 +255,8 @@ public void testDateRangeQueryFormat() throws IOException { assertThat(parsedQuery, instanceOf(PointRangeQuery.class)); 
assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, - DateTime.parse("2012-01-01T00:00:00.000+00").getMillis(), - DateTime.parse("2030-01-01T00:00:00.000+00").getMillis() - 1), + ZonedDateTime.parse("2012-01-01T00:00:00.000+00").toInstant().toEpochMilli(), + ZonedDateTime.parse("2030-01-01T00:00:00.000+00").toInstant().toEpochMilli() - 1), parsedQuery); // Test Invalid format @@ -288,8 +286,8 @@ public void testDateRangeBoundaries() throws IOException { parsedQuery = ((IndexOrDocValuesQuery) parsedQuery).getIndexQuery(); assertThat(parsedQuery, instanceOf(PointRangeQuery.class)); assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, - DateTime.parse("2014-11-01T00:00:00.000+00").getMillis(), - DateTime.parse("2014-12-08T23:59:59.999+00").getMillis()), + ZonedDateTime.parse("2014-11-01T00:00:00.000+00").toInstant().toEpochMilli(), + ZonedDateTime.parse("2014-12-08T23:59:59.999+00").toInstant().toEpochMilli()), parsedQuery); query = "{\n" + @@ -305,8 +303,8 @@ public void testDateRangeBoundaries() throws IOException { parsedQuery = ((IndexOrDocValuesQuery) parsedQuery).getIndexQuery(); assertThat(parsedQuery, instanceOf(PointRangeQuery.class)); assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, - DateTime.parse("2014-11-30T23:59:59.999+00").getMillis() + 1, - DateTime.parse("2014-12-08T00:00:00.000+00").getMillis() - 1), + ZonedDateTime.parse("2014-11-30T23:59:59.999+00").toInstant().toEpochMilli() + 1, + ZonedDateTime.parse("2014-12-08T00:00:00.000+00").toInstant().toEpochMilli() - 1), parsedQuery); } @@ -387,8 +385,8 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC return Relation.WITHIN; } }; - DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); - DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); + ZonedDateTime queryFromValue = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + ZonedDateTime queryToValue = ZonedDateTime.of(2016, 1, 1, 
0, 0, 0, 0, ZoneOffset.UTC); query.from(queryFromValue); query.to(queryToValue); SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); @@ -422,8 +420,8 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC return Relation.WITHIN; } }; - DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); - DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); + ZonedDateTime queryFromValue = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + ZonedDateTime queryToValue = ZonedDateTime.of(2016, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); query.from(queryFromValue); query.to(queryToValue); query.timeZone(randomZone().getId()); @@ -447,8 +445,8 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC return Relation.DISJOINT; } }; - DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); - DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); + ZonedDateTime queryFromValue = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + ZonedDateTime queryToValue = ZonedDateTime.of(2016, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); query.from(queryFromValue); query.to(queryToValue); SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); @@ -464,8 +462,8 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC return Relation.INTERSECTS; } }; - DateTime queryFromValue = new DateTime(2015, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); - DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); + ZonedDateTime queryFromValue = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + ZonedDateTime queryToValue = ZonedDateTime.of(2016, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); query.from(queryFromValue); query.to(queryToValue); SearchExecutionContext searchExecutionContext = 
createSearchExecutionContext(); diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java index c484635c8acb6..95bd2b0eef61c 100644 --- a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java @@ -54,10 +54,11 @@ import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; import org.hamcrest.CoreMatchers; import org.hamcrest.Matcher; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -230,7 +231,8 @@ private static DecayFunctionBuilder createRandomDecayFunction() { offset = randomFrom(DistanceUnit.values()).toString(randomDouble()); break; case DATE_FIELD_NAME: - origin = new DateTime(System.currentTimeMillis() - randomIntBetween(0, 1000000), DateTimeZone.UTC).toString(); + origin = ZonedDateTime.ofInstant( + Instant.ofEpochMilli(System.currentTimeMillis() - randomIntBetween(0, 1000000)), ZoneOffset.UTC).toString(); scale = randomTimeValue(1, 1000, "d", "h", "ms", "s", "m"); offset = randomPositiveTimeValue(); break; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateScriptMocksPlugin.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateScriptMocksPlugin.java index 13a9906ec2d39..c4fe9315e6461 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateScriptMocksPlugin.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateScriptMocksPlugin.java @@ -10,9 +10,10 @@ import org.elasticsearch.script.MockScriptPlugin; import 
org.elasticsearch.search.lookup.LeafDocLookup; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.HashMap; import java.util.Map; import java.util.function.Function; @@ -37,18 +38,21 @@ public Map, Object>> pluginScripts() { String fieldname = (String) params.get("fieldname"); return docLookup.get(fieldname); }); - scripts.put( - DOUBLE_PLUS_ONE_MONTH, - params -> new DateTime(Double.valueOf((double) params.get("_value")).longValue(), DateTimeZone.UTC).plusMonths(1).getMillis() - ); - scripts.put(LONG_PLUS_ONE_MONTH, params -> new DateTime((long) params.get("_value"), DateTimeZone.UTC).plusMonths(1).getMillis()); + scripts.put(DOUBLE_PLUS_ONE_MONTH, params -> { + Instant instant = Instant.ofEpochMilli(Double.valueOf((double) params.get("_value")).longValue()); + return ZonedDateTime.ofInstant(instant, ZoneOffset.UTC).plusMonths(1).toInstant().toEpochMilli(); + }); + scripts.put(LONG_PLUS_ONE_MONTH, params -> { + Instant instant = Instant.ofEpochMilli((long) params.get("_value")); + return ZonedDateTime.ofInstant(instant, ZoneOffset.UTC).plusMonths(1).toInstant().toEpochMilli(); + }); return scripts; } @Override protected Map, Object>> nonDeterministicPluginScripts() { Map, Object>> scripts = new HashMap<>(); - scripts.put(CURRENT_DATE, params -> new DateTime().getMillis()); + scripts.put(CURRENT_DATE, params -> ZonedDateTime.now().toInstant().toEpochMilli()); return scripts; } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index 23b0ace7e6168..3b1b0198a898e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocValuesFieldExistsQuery; @@ -2836,6 +2837,9 @@ private void executeTestCase( config.setIndexSort(indexSort); config.setCodec(TestUtil.getDefaultCodec()); } + if (forceMerge == false) { + config.setMergePolicy(NoMergePolicy.INSTANCE); + } try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory, config)) { Document document = new Document(); int id = 0; @@ -2843,10 +2847,13 @@ private void executeTestCase( document.clear(); addToDocument(id, document, fields); indexWriter.addDocument(document); + if (frequently()) { + indexWriter.commit(); + } id++; } - if (forceMerge || rarely()) { - // forceMerge randomly or if the collector-per-leaf testing stuff would break the tests. + if (forceMerge) { + // forceMerge if the collector-per-leaf testing stuff would break the tests. 
indexWriter.forceMerge(1); } else { if (dataset.size() > 0) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java index cf1fee78fb8a5..ac7e0f54ac0ca 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java @@ -13,9 +13,9 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -33,7 +33,7 @@ public void setUp() throws Exception { super.setUp(); format = randomNumericDocValueFormat(); - Function interval = randomFrom( + Function interval = randomFrom( dateTime -> dateTime.plusSeconds(1), dateTime -> dateTime.plusMinutes(1), dateTime -> dateTime.plusHours(1), @@ -45,13 +45,13 @@ public void setUp() throws Exception { final int numRanges = randomNumberOfBuckets(); final List> listOfRanges = new ArrayList<>(numRanges); - DateTime date = new DateTime(DateTimeZone.UTC); - double start = date.getMillis(); + ZonedDateTime date = ZonedDateTime.now(ZoneOffset.UTC); + double start = date.toInstant().toEpochMilli(); double end = 0; for (int i = 0; i < numRanges; i++) { - double from = date.getMillis(); + double from = date.toInstant().toEpochMilli(); date = interval.apply(date); - double to = date.getMillis(); + double to = date.toInstant().toEpochMilli(); if (to > end) { end = to; } diff --git a/x-pack/docs/en/security/fips-140-compliance.asciidoc 
b/x-pack/docs/en/security/fips-140-compliance.asciidoc index 85f4f82843f2e..beea826b4d277 100644 --- a/x-pack/docs/en/security/fips-140-compliance.asciidoc +++ b/x-pack/docs/en/security/fips-140-compliance.asciidoc @@ -27,6 +27,12 @@ For {es}, adherence to FIPS 140-2 is ensured by [discrete] === Upgrade considerations +[IMPORTANT] +==== +include::fips-java17.asciidoc[] +==== + + If you plan to upgrade your existing cluster to a version that can be run in a FIPS 140-2 configured JVM, we recommend to first perform a rolling upgrade to the new version in your existing JVM and perform all necessary diff --git a/x-pack/docs/en/security/fips-java17.asciidoc b/x-pack/docs/en/security/fips-java17.asciidoc new file mode 100644 index 0000000000000..b5a99a70526ef --- /dev/null +++ b/x-pack/docs/en/security/fips-java17.asciidoc @@ -0,0 +1,9 @@ +{es} 8.0 requires Java 17 or later. +There is not yet a FIPS-certified security module for Java 17 +that you can use when running {es} 8.0 in FIPS 140-2 mode. +If you run in FIPS 140-2 mode, you will either need to request +an exception from your security organization to upgrade to {es} 8.0, +or remain on {es} 7.x until Java 17 is certified. +ifeval::["{release-state}"=="released"] +Alternatively, consider using {ess} in the FedRAMP-certified GovCloud region. +endif::[] \ No newline at end of file diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index bc5dddd39e63c..1649c2bc6db2a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -49,8 +49,6 @@ public enum Feature { SECURITY_AUTHORIZATION_REALM(OperationMode.PLATINUM, true), SECURITY_AUTHORIZATION_ENGINE(OperationMode.PLATINUM, true), - WATCHER(OperationMode.STANDARD, true), - // TODO: should just check WATCHER directly? 
MONITORING_CLUSTER_ALERTS(OperationMode.STANDARD, true), MONITORING_UPDATE_RETENTION(OperationMode.STANDARD, false), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/DataStreamsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/DataStreamsStatsAction.java index ba03c56f2598a..ef7a56c830e5c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/DataStreamsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/DataStreamsStatsAction.java @@ -83,7 +83,7 @@ protected void addCustomXContentFields(XContentBuilder builder, Params params) t builder.field("data_stream_count", dataStreamCount); builder.field("backing_indices", backingIndices); builder.humanReadableField("total_store_size_bytes", "total_store_size", totalStoreSize); - builder.array("data_streams", (Object[]) dataStreams); + builder.xContentList("data_streams", dataStreams); } public int getDataStreamCount() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/GetDataStreamAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/GetDataStreamAction.java index c1c6d7c98fe2c..317a062be1e25 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/GetDataStreamAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/GetDataStreamAction.java @@ -169,7 +169,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field(DataStream.NAME_FIELD.getPreferredName(), dataStream.getName()); builder.field(DataStream.TIMESTAMP_FIELD_FIELD.getPreferredName(), dataStream.getTimeStampField()); - builder.field(DataStream.INDICES_FIELD.getPreferredName(), dataStream.getIndices()); + builder.xContentList(DataStream.INDICES_FIELD.getPreferredName(), dataStream.getIndices()); 
builder.field(DataStream.GENERATION_FIELD.getPreferredName(), dataStream.getGeneration()); if (dataStream.getMetadata() != null) { builder.field(DataStream.METADATA_FIELD.getPreferredName(), dataStream.getMetadata()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersResponse.java index 957bd27183182..654aac16ccbe2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/ReloadAnalyzersResponse.java @@ -66,8 +66,8 @@ protected void addCustomXContentFields(XContentBuilder builder, Params params) t builder.startObject(); ReloadDetails value = indexDetails.getValue(); builder.field(INDEX_FIELD.getPreferredName(), value.getIndexName()); - builder.field(RELOADED_ANALYZERS_FIELD.getPreferredName(), value.getReloadedAnalyzers()); - builder.field(RELOADED_NODE_IDS_FIELD.getPreferredName(), value.getReloadedIndicesNodes()); + builder.stringListField(RELOADED_ANALYZERS_FIELD.getPreferredName(), value.getReloadedAnalyzers()); + builder.stringListField(RELOADED_NODE_IDS_FIELD.getPreferredName(), value.getReloadedIndicesNodes()); builder.endObject(); } builder.endArray(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java index 0154d55de91cd..1472197b8136e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocateAction.java @@ -143,9 +143,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (totalShardsPerNode != null) { builder.field(TOTAL_SHARDS_PER_NODE_FIELD.getPreferredName(), totalShardsPerNode); } - 
builder.field(INCLUDE_FIELD.getPreferredName(), include); - builder.field(EXCLUDE_FIELD.getPreferredName(), exclude); - builder.field(REQUIRE_FIELD.getPreferredName(), require); + builder.stringStringMap(INCLUDE_FIELD.getPreferredName(), include); + builder.stringStringMap(EXCLUDE_FIELD.getPreferredName(), exclude); + builder.stringStringMap(REQUIRE_FIELD.getPreferredName(), require); builder.endObject(); return builder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleFeatureSetUsage.java index 26e544915b7fc..ee118827932ca 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleFeatureSetUsage.java @@ -64,7 +64,7 @@ public IndexLifecycleFeatureSetUsage(List policyStats) { protected void innerXContent(XContentBuilder builder, Params params) throws IOException { if (policyStats != null) { builder.field("policy_count", policyStats.size()); - builder.field("policy_stats", policyStats); + builder.xContentList("policy_stats", policyStats); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java index d36e61535efd2..fa3f2efe0de21 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java @@ -95,7 +95,7 @@ public Diff diff(Custom previousState) { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(POLICIES_FIELD.getPreferredName(), policyMetadatas); + builder.xContentValuesMap(POLICIES_FIELD.getPreferredName(), 
policyMetadatas); builder.field(OPERATION_MODE_FIELD.getPreferredName(), operationMode); return builder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyMetadata.java index 79ea7fb235ecc..407bca34b68ad 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyMetadata.java @@ -101,7 +101,7 @@ public String getModifiedDateString() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(POLICY.getPreferredName(), policy); - builder.field(HEADERS.getPreferredName(), headers); + builder.stringStringMap(HEADERS.getPreferredName(), headers); builder.field(VERSION.getPreferredName(), version); builder.field(MODIFIED_DATE.getPreferredName(), modifiedDate); builder.field(MODIFIED_DATE_STRING.getPreferredName(), getModifiedDateString()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/Phase.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/Phase.java index c1c8d489fd1b1..faadc712218cb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/Phase.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/Phase.java @@ -148,7 +148,7 @@ public Map getActions() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(MIN_AGE.getPreferredName(), minimumAge.getStringRep()); - builder.field(ACTIONS_FIELD.getPreferredName(), actions); + builder.xContentValuesMap(ACTIONS_FIELD.getPreferredName(), actions); builder.endObject(); return builder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStep.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStep.java index 3ca6166187af7..85860ff41ee78 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStep.java @@ -94,7 +94,7 @@ List getShardFollowTaskInfos() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(SHARD_FOLLOW_TASKS.getPreferredName(), shardFollowTaskInfos); + builder.xContentList(SHARD_FOLLOW_TASKS.getPreferredName(), shardFollowTaskInfos); String message; if (shardFollowTaskInfos.size() > 0) { message = "Waiting for [" + shardFollowTaskInfos.size() + "] shard follow tasks to be in sync"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java index 1dcf205ca79c9..e9987b2dff8aa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyAction.java @@ -72,7 +72,7 @@ public boolean hasFailures() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(HAS_FAILURES_FIELD.getPreferredName(), hasFailures()); - builder.field(FAILED_INDEXES_FIELD.getPreferredName(), failedIndexes); + builder.stringListField(FAILED_INDEXES_FIELD.getPreferredName(), failedIndexes); builder.endObject(); return builder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollableIndexCaps.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollableIndexCaps.java index 50f1746c32fb2..e73bfe9d32d80 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollableIndexCaps.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollableIndexCaps.java @@ -64,7 +64,7 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(indexName); { - builder.field(ROLLUP_JOBS.getPreferredName(), jobCaps); + builder.xContentList(ROLLUP_JOBS.getPreferredName(), jobCaps); } builder.endObject(); return builder; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupJobCaps.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupJobCaps.java index ea87eb0b1ca12..4de41df47050f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupJobCaps.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupJobCaps.java @@ -107,7 +107,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(FIELDS.getPreferredName()); { for (Map.Entry fieldCap : fieldCapLookup.entrySet()) { - builder.array(fieldCap.getKey(), fieldCap.getValue()); + builder.xContentList(fieldCap.getKey(), fieldCap.getValue()); } } builder.endObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java index e1109f948e167..7d705e71cbc66 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java @@ -139,7 +139,7 @@ public XContentBuilder 
toXContent(final XContentBuilder builder, final Params pa builder.startObject(); { builder.field(FIELD, field); - builder.field(METRICS, metrics); + builder.stringListField(METRICS, metrics); } return builder.endObject(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index ae214c5c5f88c..5bf5693232bdb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -246,8 +246,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, boolea builder.field(Fields.GLOBAL.getPreferredName()); ConfigurableClusterPrivileges.toXContent(builder, params, Arrays.asList(configurableClusterPrivileges)); } - builder.array(Fields.INDICES.getPreferredName(), (Object[]) indicesPrivileges); - builder.array(Fields.APPLICATIONS.getPreferredName(), (Object[]) applicationPrivileges); + builder.xContentList(Fields.INDICES.getPreferredName(), indicesPrivileges); + builder.xContentList(Fields.APPLICATIONS.getPreferredName(), applicationPrivileges); if (runAs != null) { builder.array(Fields.RUN_AS.getPreferredName(), runAs); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/NodeTermsEnumRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/NodeTermsEnumRequest.java index 08fe633a3fedb..044968d62da9a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/NodeTermsEnumRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/NodeTermsEnumRequest.java @@ -6,7 +6,9 @@ */ package org.elasticsearch.xpack.core.termsenum.action; +import org.elasticsearch.Version; import 
org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -26,22 +28,26 @@ */ public class NodeTermsEnumRequest extends TransportRequest implements IndicesRequest { - private String field; - private String string; - private String searchAfter; - private long taskStartedTimeMillis; - private long nodeStartedTimeMillis; - private boolean caseInsensitive; - private int size; - private long timeout; + private final String field; + private final String string; + private final String searchAfter; + private final long taskStartedTimeMillis; + private final boolean caseInsensitive; + private final int size; + private final long timeout; private final QueryBuilder indexFilter; - private Set shardIds; - private String nodeId; + private final Set shardIds; + private final String nodeId; + private final OriginalIndices originalIndices; + + private long nodeStartedTimeMillis; - public NodeTermsEnumRequest(final String nodeId, + public NodeTermsEnumRequest(OriginalIndices originalIndices, + final String nodeId, final Set shardIds, TermsEnumRequest request, long taskStartTimeMillis) { + this.originalIndices = originalIndices; this.field = request.field(); this.string = request.string(); this.searchAfter = request.searchAfter(); @@ -70,6 +76,15 @@ public NodeTermsEnumRequest(StreamInput in) throws IOException { for (int i = 0; i < numShards; i++) { shardIds.add(new ShardId(in)); } + if (in.getVersion().onOrAfter(Version.V_7_15_1)) { + originalIndices = OriginalIndices.readOriginalIndices(in); + } else { + String[] indicesNames = shardIds.stream() + .map(ShardId::getIndexName) + .distinct() + .toArray(String[]::new); + this.originalIndices = new OriginalIndices(indicesNames, null); + } } @Override @@ -92,6 +107,9 @@ public void writeTo(StreamOutput out) throws IOException { for 
(ShardId shardId : shardIds) { shardId.writeTo(out); } + if (out.getVersion().onOrAfter(Version.V_7_15_1)) { + OriginalIndices.writeOriginalIndices(originalIndices, out); + } } public String field() { @@ -152,16 +170,12 @@ public QueryBuilder indexFilter() { @Override public String[] indices() { - HashSet indicesNames = new HashSet<>(); - for (ShardId shardId : shardIds) { - indicesNames.add(shardId.getIndexName()); - } - return indicesNames.toArray(new String[0]); + return originalIndices.indices(); } @Override public IndicesOptions indicesOptions() { - return null; + return originalIndices.indicesOptions(); } public boolean remove(ShardId shardId) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java index f88f01f5927c8..c0a203815bf18 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java @@ -124,7 +124,7 @@ public TransportTermsEnumAction( this.scriptService = scriptService; this.licenseState = licenseState; this.settings = settings; - this.remoteClusterService = searchTransportService.getRemoteClusterService();; + this.remoteClusterService = searchTransportService.getRemoteClusterService(); transportService.registerRequestHandler( transportShardAction, @@ -140,7 +140,8 @@ protected void doExecute(Task task, TermsEnumRequest request, ActionListener shardIds, TermsEnumRequest request, long taskStartMillis) { @@ -149,14 +150,14 @@ protected NodeTermsEnumRequest newNodeRequest(final String nodeId, // final ClusterState clusterState = clusterService.state(); // final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); // final AliasFilter aliasFilter = 
searchService.buildAliasFilter(clusterState, shard.getIndexName(), indicesAndAliases); - return new NodeTermsEnumRequest(nodeId, shardIds, request, taskStartMillis); + return new NodeTermsEnumRequest(originalIndices, nodeId, shardIds, request, taskStartMillis); } protected NodeTermsEnumResponse readShardResponse(StreamInput in) throws IOException { return new NodeTermsEnumResponse(in); } - protected Map> getNodeBundles(ClusterState clusterState, TermsEnumRequest request, String[] concreteIndices) { + protected Map> getNodeBundles(ClusterState clusterState, String[] concreteIndices) { // Group targeted shards by nodeId Map> fastNodeBundles = new HashMap<>(); for (String indexName : concreteIndices) { @@ -166,9 +167,7 @@ protected Map> getNodeBundles(ClusterState clusterState, Te GroupShardsIterator shards = clusterService.operationRouting() .searchShards(clusterState, singleIndex, null, null); - Iterator shardsForIndex = shards.iterator(); - while (shardsForIndex.hasNext()) { - ShardIterator copiesOfShard = shardsForIndex.next(); + for (ShardIterator copiesOfShard : shards) { ShardRouting selectedCopyOfShard = null; for (ShardRouting copy : copiesOfShard) { // Pick the first active node with a copy of the shard @@ -181,7 +180,7 @@ protected Map> getNodeBundles(ClusterState clusterState, Te break; } String nodeId = selectedCopyOfShard.currentNodeId(); - Set bundle = null; + final Set bundle; if (fastNodeBundles.containsKey(nodeId)) { bundle = fastNodeBundles.get(nodeId); } else { @@ -392,7 +391,7 @@ protected NodeTermsEnumResponse dataNodeOperation(NodeTermsEnumRequest request, if (termsList.size() >= shard_size) { break; } - }; + } } catch (Exception e) { error = ExceptionsHelper.stackTrace(e); @@ -418,7 +417,7 @@ private boolean canAccess( if (indexAccessControl != null) { final boolean dls = indexAccessControl.getDocumentPermissions().hasDocumentLevelPermissions(); - if ( dls && licenseChecker.get()) { + if (dls && licenseChecker.get()) { // Check to see if any of 
the roles defined for the current user rewrite to match_all SecurityContext securityContext = new SecurityContext(clusterService.getSettings(), threadContext); @@ -469,12 +468,12 @@ protected class AsyncBroadcastAction { private final Task task; private final TermsEnumRequest request; private ActionListener listener; - private final ClusterState clusterState; private final DiscoveryNodes nodes; private final int expectedOps; private final AtomicInteger counterOps = new AtomicInteger(); private final AtomicReferenceArray atomicResponses; private final Map> nodeBundles; + private final OriginalIndices localIndices; private final Map remoteClusterIndices; protected AsyncBroadcastAction(Task task, TermsEnumRequest request, ActionListener listener) { @@ -482,7 +481,7 @@ protected AsyncBroadcastAction(Task task, TermsEnumRequest request, ActionListen this.request = request; this.listener = listener; - clusterState = clusterService.state(); + ClusterState clusterState = clusterService.state(); ClusterBlockException blockException = checkGlobalBlock(clusterState, request); if (blockException != null) { @@ -490,7 +489,7 @@ protected AsyncBroadcastAction(Task task, TermsEnumRequest request, ActionListen } this.remoteClusterIndices = remoteClusterService.groupIndices(request.indicesOptions(), request.indices()); - OriginalIndices localIndices = remoteClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + this.localIndices = remoteClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); // update to concrete indices String[] concreteIndices = localIndices == null ? 
new String[0] : @@ -502,7 +501,7 @@ protected AsyncBroadcastAction(Task task, TermsEnumRequest request, ActionListen nodes = clusterState.nodes(); logger.trace("resolving shards based on cluster state version [{}]", clusterState.version()); - nodeBundles = getNodeBundles(clusterState, request, concreteIndices); + nodeBundles = getNodeBundles(clusterState, concreteIndices); expectedOps = nodeBundles.size() + remoteClusterIndices.size(); atomicResponses = new AtomicReferenceArray<>(expectedOps); @@ -557,7 +556,7 @@ protected void performOperation(final String nodeId, final Set shardIds onNodeFailure(nodeId, opsIndex, null); } else { try { - final NodeTermsEnumRequest nodeRequest = newNodeRequest(nodeId, shardIds, request, task.getStartTime()); + final NodeTermsEnumRequest nodeRequest = newNodeRequest(localIndices, nodeId, shardIds, request, task.getStartTime()); nodeRequest.setParentTask(clusterService.localNode().getId(), task.getId()); DiscoveryNode node = nodes.get(nodeId); if (node == null) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherConstants.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherConstants.java new file mode 100644 index 0000000000000..f598bc5840d97 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/WatcherConstants.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.watcher; + +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicensedFeature; + +public class WatcherConstants { + + public static final LicensedFeature.Momentary WATCHER_FEATURE = + LicensedFeature.momentary(null, "watcher", License.OperationMode.STANDARD); + + // no construction + private WatcherConstants() {} +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/throttler/ActionThrottler.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/throttler/ActionThrottler.java index 99792f2f5cfdc..f5dfe118ee62c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/throttler/ActionThrottler.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/throttler/ActionThrottler.java @@ -9,6 +9,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.watcher.WatcherConstants; import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext; import java.time.Clock; @@ -39,7 +40,7 @@ public TimeValue throttlePeriod() { @Override public Result throttle(String actionId, WatchExecutionContext ctx) { - if (licenseState.checkFeature(XPackLicenseState.Feature.WATCHER) == false) { + if (WatcherConstants.WATCHER_FEATURE.check(licenseState) == false) { return Result.throttle(LICENSE, "watcher license does not allow action execution"); } if (periodThrottler != null) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/watcher/actions/throttler/WatchThrottlerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/watcher/actions/throttler/WatchThrottlerTests.java index a679f74b6631c..7ad379664ddf9 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/watcher/actions/throttler/WatchThrottlerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/watcher/actions/throttler/WatchThrottlerTests.java @@ -6,9 +6,9 @@ */ package org.elasticsearch.xpack.core.watcher.actions.throttler; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.watcher.WatcherConstants; import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext; import static org.hamcrest.Matchers.is; @@ -25,8 +25,8 @@ public void testThrottleDueToAck() throws Exception { when(periodThrottler.throttle("_action", ctx)).thenReturn(Throttler.Result.NO); Throttler.Result expectedResult = Throttler.Result.throttle(Throttler.Type.ACK, "_reason"); when(ackThrottler.throttle("_action", ctx)).thenReturn(expectedResult); - XPackLicenseState licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.WATCHER)).thenReturn(true); + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(WatcherConstants.WATCHER_FEATURE)).thenReturn(true); ActionThrottler throttler = new ActionThrottler(periodThrottler, ackThrottler, licenseState); Throttler.Result result = throttler.throttle("_action", ctx); assertThat(result, notNullValue()); @@ -40,8 +40,8 @@ public void testThrottleDueToPeriod() throws Exception { Throttler.Result expectedResult = Throttler.Result.throttle(Throttler.Type.PERIOD, "_reason"); when(periodThrottler.throttle("_action", ctx)).thenReturn(expectedResult); when(ackThrottler.throttle("_action", ctx)).thenReturn(Throttler.Result.NO); - XPackLicenseState licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.WATCHER)).thenReturn(true); + MockLicenseState licenseState = 
mock(MockLicenseState.class); + when(licenseState.isAllowed(WatcherConstants.WATCHER_FEATURE)).thenReturn(true); ActionThrottler throttler = new ActionThrottler(periodThrottler, ackThrottler, licenseState); Throttler.Result result = throttler.throttle("_action", ctx); assertThat(result, notNullValue()); @@ -56,8 +56,8 @@ public void testThrottleDueAckAndPeriod() throws Exception { when(periodThrottler.throttle("_action", ctx)).thenReturn(periodResult); Throttler.Result ackResult = Throttler.Result.throttle(Throttler.Type.ACK, "_reason_ack"); when(ackThrottler.throttle("_action", ctx)).thenReturn(ackResult); - XPackLicenseState licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.WATCHER)).thenReturn(true); + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(WatcherConstants.WATCHER_FEATURE)).thenReturn(true); ActionThrottler throttler = new ActionThrottler(periodThrottler, ackThrottler, licenseState); Throttler.Result result = throttler.throttle("_action", ctx); assertThat(result, notNullValue()); @@ -71,8 +71,8 @@ public void testNoThrottle() throws Exception { WatchExecutionContext ctx = mock(WatchExecutionContext.class); when(periodThrottler.throttle("_action", ctx)).thenReturn(Throttler.Result.NO); when(ackThrottler.throttle("_action", ctx)).thenReturn(Throttler.Result.NO); - XPackLicenseState licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.WATCHER)).thenReturn(true); + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(WatcherConstants.WATCHER_FEATURE)).thenReturn(true); ActionThrottler throttler = new ActionThrottler(periodThrottler, ackThrottler, licenseState); Throttler.Result result = throttler.throttle("_action", ctx); assertThat(result, notNullValue()); @@ -84,8 +84,8 @@ public void testWithoutPeriod() throws Exception { WatchExecutionContext ctx = mock(WatchExecutionContext.class); Throttler.Result 
ackResult = mock(Throttler.Result.class); when(ackThrottler.throttle("_action", ctx)).thenReturn(ackResult); - XPackLicenseState licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.WATCHER)).thenReturn(true); + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(WatcherConstants.WATCHER_FEATURE)).thenReturn(true); ActionThrottler throttler = new ActionThrottler(null, ackThrottler, licenseState); Throttler.Result result = throttler.throttle("_action", ctx); assertThat(result, notNullValue()); @@ -97,8 +97,8 @@ public void testThatRestrictedLicenseReturnsCorrectResult() throws Exception { WatchExecutionContext ctx = mock(WatchExecutionContext.class); Throttler.Result ackResult = mock(Throttler.Result.class); when(ackThrottler.throttle("_action", ctx)).thenReturn(ackResult); - XPackLicenseState licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.WATCHER)).thenReturn(false); + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(WatcherConstants.WATCHER_FEATURE)).thenReturn(false); ActionThrottler throttler = new ActionThrottler(null, ackThrottler, licenseState); Throttler.Result result = throttler.throttle("_action", ctx); assertThat(result, notNullValue()); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryItem.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryItem.java index 70e80973e67b4..fb9fe22921e29 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryItem.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryItem.java @@ -82,7 +82,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } builder.field(SUCCESS.getPreferredName(), success); if (executionState != null) { - builder.field(EXECUTION_STATE.getPreferredName(), 
executionState.asMap()); + builder.stringStringMap(EXECUTION_STATE.getPreferredName(), executionState.asMap()); } if (errorDetails != null) { builder.field(ERROR.getPreferredName(), errorDetails); diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index 50688b621c0ff..17efb21fb23d2 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -168,6 +168,7 @@ public List> getSettings() { settings.add(EnrichStatsCollector.STATS_TIMEOUT); settings.addAll(Exporters.getSettings()); settings.add(Monitoring.MIGRATION_DECOMMISSION_ALERTS); + settings.addAll(MonitoringDeprecatedSettings.getSettings()); return Collections.unmodifiableList(settings); } diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringDeprecatedSettings.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringDeprecatedSettings.java new file mode 100644 index 0000000000000..3b6a5370a5151 --- /dev/null +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringDeprecatedSettings.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.monitoring; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.core.TimeValue; + +import java.util.Arrays; +import java.util.List; + +/** + * A collection of settings that are marked as deprecated and soon to be removed. 
These settings have been moved here because the features + * that make use of them have been removed from the code. Their removals can be enacted after the standard deprecation period has completed. + */ +public final class MonitoringDeprecatedSettings { + private MonitoringDeprecatedSettings() {} + + // =================== + // Deprecated in 7.16: + public static final Setting.AffixSetting TEMPLATE_CREATE_LEGACY_VERSIONS_SETTING = + Setting.affixKeySetting("xpack.monitoring.exporters.","index.template.create_legacy_templates", + (key) -> Setting.boolSetting(key, true, Property.Dynamic, Property.NodeScope, Property.Deprecated)); + public static final Setting.AffixSetting USE_INGEST_PIPELINE_SETTING = + Setting.affixKeySetting("xpack.monitoring.exporters.","use_ingest", + key -> Setting.boolSetting(key, true, Property.Dynamic, Property.NodeScope, Property.Deprecated)); + public static final Setting.AffixSetting PIPELINE_CHECK_TIMEOUT_SETTING = + Setting.affixKeySetting("xpack.monitoring.exporters.","index.pipeline.master_timeout", + (key) -> Setting.timeSetting(key, TimeValue.MINUS_ONE, Property.Dynamic, Property.NodeScope, Property.Deprecated)); + // =================== + + public static List> getSettings() { + return Arrays.asList(TEMPLATE_CREATE_LEGACY_VERSIONS_SETTING, USE_INGEST_PIPELINE_SETTING, PIPELINE_CHECK_TIMEOUT_SETTING); + } + +} diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java index 42452cc6fc7ea..37d8f7824fda1 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java @@ -40,6 +40,7 @@ public void setup() { 
cleanerService.setGlobalRetention(TimeValue.MAX_VALUE); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/78737") public void testNothingToDelete() throws Exception { CleanerService.Listener listener = getListener(); listener.onCleanUpIndices(days(0)); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java index 97a35d42f1367..d2d3953653b2b 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java @@ -119,6 +119,7 @@ public void testInvalidVersionBlocks() { verifyNoMoreInteractions(client); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/78736") public void testTemplateCheckBlocksAfterSuccessfulVersion() { final Exception exception = failureGetException(); final boolean firstSucceeds = randomBoolean(); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupIndexCaps.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupIndexCaps.java index 390743f0584d8..ca55aa2ea5284 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupIndexCaps.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupIndexCaps.java @@ -184,7 +184,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(rollupIndexName); - builder.field(ROLLUP_JOBS.getPreferredName(), jobCaps); + builder.xContentList(ROLLUP_JOBS.getPreferredName(), jobCaps); builder.endObject(); return builder; 
} diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/TransportRollupAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/TransportRollupAction.java index eceb906ccb4f0..31d1125a91fa2 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/TransportRollupAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/v2/TransportRollupAction.java @@ -316,7 +316,7 @@ private static XContentBuilder getProperties(XContentBuilder builder, RollupActi String defaultMetric = metrics.contains("value_count") ? "value_count" : metrics.get(0); builder.startObject(metricConfig.getField()) .field("type", AggregateDoubleMetricFieldMapper.CONTENT_TYPE) - .array(AggregateDoubleMetricFieldMapper.Names.METRICS, metrics.toArray()) + .stringListField(AggregateDoubleMetricFieldMapper.Names.METRICS, metrics) .field(AggregateDoubleMetricFieldMapper.Names.DEFAULT_METRIC, defaultMetric) .endObject(); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index bbf471cc5f522..ce6aad81bface 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -59,12 +59,12 @@ import org.elasticsearch.xpack.core.rollup.job.MetricConfig; import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import org.junit.Before; import java.io.IOException; import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -137,7 +137,7 @@ public void 
testSimpleDateHisto() throws Exception { "the_histo.date_histogram._count", 2, "the_histo.date_histogram.time_zone", - DateTimeZone.UTC.toString(), + "UTC", // TODO: the default is hardcoded from Joda, we should change this "_rollup.id", job.getId() ) @@ -158,7 +158,7 @@ public void testSimpleDateHisto() throws Exception { "the_histo.date_histogram._count", 1, "the_histo.date_histogram.time_zone", - DateTimeZone.UTC.toString(), + "UTC", // TODO: the default is hardcoded from Joda, we should change this "_rollup.id", job.getId() ) @@ -221,7 +221,7 @@ public void testDateHistoAndMetrics() throws Exception { "counter.sum.value", 50.0, "the_histo.date_histogram.time_zone", - DateTimeZone.UTC.toString(), + "UTC", // TODO: the default is hardcoded from Joda, we should change this "_rollup.id", job.getId() ) @@ -252,7 +252,7 @@ public void testDateHistoAndMetrics() throws Exception { "counter.sum.value", 141.0, "the_histo.date_histogram.time_zone", - DateTimeZone.UTC.toString(), + "UTC", // TODO: the default is hardcoded from Joda, we should change this "_rollup.id", job.getId() ) @@ -283,7 +283,7 @@ public void testDateHistoAndMetrics() throws Exception { "counter.sum.value", 275.0, "the_histo.date_histogram.time_zone", - DateTimeZone.UTC.toString(), + "UTC", // TODO: the default is hardcoded from Joda, we should change this "_rollup.id", job.getId() ) @@ -314,7 +314,7 @@ public void testDateHistoAndMetrics() throws Exception { "counter.sum.value", 270.0, "the_histo.date_histogram.time_zone", - DateTimeZone.UTC.toString(), + "UTC", // TODO: the default is hardcoded from Joda, we should change this "_rollup.id", job.getId() ) @@ -345,7 +345,7 @@ public void testDateHistoAndMetrics() throws Exception { "counter.sum.value", 440.0, "the_histo.date_histogram.time_zone", - DateTimeZone.UTC.toString(), + "UTC", // TODO: the default is hardcoded from Joda, we should change this "_rollup.id", job.getId() ) @@ -398,7 +398,7 @@ public void testSimpleDateHistoWithDelay() throws 
Exception { "the_histo.date_histogram._count", 2, "the_histo.date_histogram.time_zone", - DateTimeZone.UTC.toString(), + "UTC", // TODO: the default is hardcoded from Joda, we should change this "_rollup.id", job.getId() ) @@ -419,7 +419,7 @@ public void testSimpleDateHistoWithDelay() throws Exception { "the_histo.date_histogram._count", 2, "the_histo.date_histogram.time_zone", - DateTimeZone.UTC.toString(), + "UTC", // TODO: the default is hardcoded from Joda, we should change this "_rollup.id", job.getId() ) @@ -440,7 +440,7 @@ public void testSimpleDateHistoWithDelay() throws Exception { "the_histo.date_histogram._count", 1, "the_histo.date_histogram.time_zone", - DateTimeZone.UTC.toString(), + "UTC", // TODO: the default is hardcoded from Joda, we should change this "_rollup.id", job.getId() ) @@ -493,7 +493,7 @@ public void testSimpleDateHistoWithOverlappingDelay() throws Exception { "the_histo.date_histogram._count", 3, "the_histo.date_histogram.time_zone", - DateTimeZone.UTC.toString(), + "UTC", // TODO: the default is hardcoded from Joda, we should change this "_rollup.id", job.getId() ) @@ -514,7 +514,7 @@ public void testSimpleDateHistoWithOverlappingDelay() throws Exception { "the_histo.date_histogram._count", 4, "the_histo.date_histogram.time_zone", - DateTimeZone.UTC.toString(), + "UTC", // TODO: the default is hardcoded from Joda, we should change this "_rollup.id", job.getId() ) @@ -538,7 +538,7 @@ public void testSimpleDateHistoWithTimeZone() throws Exception { ) ); - String timeZone = DateTimeZone.forOffsetHours(-3).getID(); + String timeZone = ZoneOffset.ofHours(-3).getId(); String rollupIndex = randomAlphaOfLengthBetween(5, 10); String field = "the_histo"; DateHistogramGroupConfig dateHistoConfig = new CalendarInterval(field, new DateHistogramInterval("1d"), null, timeZone); @@ -633,7 +633,11 @@ public void testRandomizedDateHisto() throws Exception { for (int i = 0; i < numDocs; i++) { // Make sure the timestamp is sufficiently in the past that 
we don't get bitten // by internal rounding, causing no docs to match - long timestamp = new DateTime().minusDays(2).minusHours(randomIntBetween(11, 100)).getMillis(); + long timestamp = ZonedDateTime.now(ZoneOffset.UTC) + .minusDays(2) + .minusHours(randomIntBetween(11, 100)) + .toInstant() + .toEpochMilli(); dataset.add(asMap(timestampField, timestamp, valueField, randomLongBetween(1, 100))); } executeTestCase(dataset, job, System.currentTimeMillis(), (resp) -> { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexNameResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexNameResolver.java index b4899197c340f..f9d5498ee5066 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexNameResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexNameResolver.java @@ -6,9 +6,9 @@ */ package org.elasticsearch.xpack.security.audit.index; -import org.joda.time.DateTime; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.Locale; public class IndexNameResolver { @@ -21,7 +21,7 @@ public enum Rollover { private final DateTimeFormatter formatter; Rollover(String format) { - this.formatter = DateTimeFormat.forPattern(format); + this.formatter = DateTimeFormatter.ofPattern(format, Locale.ROOT); } DateTimeFormatter formatter() { @@ -31,11 +31,11 @@ DateTimeFormatter formatter() { private IndexNameResolver() {} - public static String resolve(DateTime timestamp, Rollover rollover) { - return rollover.formatter().print(timestamp); + public static String resolve(ZonedDateTime timestamp, Rollover rollover) { + return rollover.formatter().format(timestamp); } - public static String resolve(String indexNamePrefix, DateTime timestamp, Rollover rollover) { + public 
static String resolve(String indexNamePrefix, ZonedDateTime timestamp, Rollover rollover) { return indexNamePrefix + resolve(timestamp, rollover); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java index 3fa2c5a047f84..6ebfacfad0481 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java @@ -1067,7 +1067,7 @@ private void withRoleDescriptor(XContentBuilder builder, RoleDescriptor roleDesc } builder.endArray(); // the toXContent method of the {@code RoleDescriptor.ApplicationResourcePrivileges) does a good job - builder.array(RoleDescriptor.Fields.APPLICATIONS.getPreferredName(), (Object[]) roleDescriptor.getApplicationPrivileges()); + builder.xContentList(RoleDescriptor.Fields.APPLICATIONS.getPreferredName(), roleDescriptor.getApplicationPrivileges()); builder.array(RoleDescriptor.Fields.RUN_AS.getPreferredName(), roleDescriptor.getRunAs()); if (roleDescriptor.getMetadata() != null && false == roleDescriptor.getMetadata().isEmpty()) { // JSON building for the metadata might fail when encountering unknown class types. 
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index 0973c7bc3e5af..5bdd59bed1f77 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -70,19 +70,20 @@ import org.elasticsearch.xpack.core.security.user.XPackUser; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import org.elasticsearch.xpack.security.test.SecurityTestUtils; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; import org.junit.Before; import java.time.Clock; import java.time.Duration; import java.time.Instant; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; @@ -1418,8 +1419,8 @@ public void testUnauthorizedDateMathExpressionIgnoreUnavailableDisallowNoIndices } public void testUnauthorizedDateMathExpressionStrict() { - String expectedIndex = "datetime-" + DateTimeFormat.forPattern("YYYY.MM.dd").print( - new DateTime(DateTimeZone.UTC).monthOfYear().roundFloorCopy()); + String expectedIndex = "datetime-" + DateTimeFormatter.ofPattern("uuuu.MM.dd", Locale.ROOT).format( + ZonedDateTime.now(ZoneOffset.UTC).withDayOfMonth(1)); SearchRequest request = new SearchRequest(""); request.indicesOptions(IndicesOptions.fromOptions(false, randomBoolean(), randomBoolean(), randomBoolean())); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, @@ -1460,8 +1461,8 @@ public void 
testMissingDateMathExpressionIgnoreUnavailableDisallowNoIndices() { } public void testMissingDateMathExpressionStrict() { - String expectedIndex = "foobar-" + DateTimeFormat.forPattern("YYYY.MM.dd").print( - new DateTime(DateTimeZone.UTC).monthOfYear().roundFloorCopy()); + String expectedIndex = "foobar-" + DateTimeFormatter.ofPattern("uuuu.MM.dd", Locale.ROOT).format( + ZonedDateTime.now(ZoneOffset.UTC).withDayOfMonth(1)); SearchRequest request = new SearchRequest(""); request.indicesOptions(IndicesOptions.fromOptions(false, randomBoolean(), randomBoolean(), randomBoolean())); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index c377e155a5573..f0533624ac927 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -85,11 +85,12 @@ import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.elasticsearch.xpack.core.security.test.TestRestrictedIndices; import org.hamcrest.Matchers; -import org.joda.time.DateTime; import java.io.IOException; import java.time.Clock; import java.time.Instant; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -1585,7 +1586,7 @@ public void writeTo(StreamOutput out) throws IOException { } private String getAuditLogName() { - final DateTime date = new DateTime().plusDays(randomIntBetween(1, 360)); + final ZonedDateTime date = ZonedDateTime.now(ZoneOffset.UTC).plusDays(randomIntBetween(1, 360)); final IndexNameResolver.Rollover rollover = 
randomFrom(IndexNameResolver.Rollover.values()); return IndexNameResolver.resolve(IndexAuditTrailField.INDEX_NAME_PREFIX, date, rollover); } diff --git a/x-pack/plugin/sql/qa/mixed-node/build.gradle b/x-pack/plugin/sql/qa/mixed-node/build.gradle index 2c3a20edcf4a0..12ced73d25210 100644 --- a/x-pack/plugin/sql/qa/mixed-node/build.gradle +++ b/x-pack/plugin/sql/qa/mixed-node/build.gradle @@ -21,8 +21,7 @@ testClusters.configureEach { tasks.named("integTest").configure{ enabled = false} // A bug (https://github.com/elastic/elasticsearch/issues/68439) limits us to perform tests with versions from 7.10.3 onwards - -BuildParams.bwcVersions.withWireCompatiple(v -> v.onOrAfter("7.10.0") && +BuildParams.bwcVersions.withWireCompatiple(v -> v.onOrAfter("7.10.3") && v != VersionProperties.getElasticsearchVersion()) { bwcVersion, baseName -> def baseCluster = testClusters.register(baseName) { diff --git a/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java b/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java index 0759f1b4ce24d..1b8864e5ff58b 100644 --- a/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java +++ b/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java @@ -12,6 +12,9 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.core.internal.io.IOUtils; @@ -123,8 +126,8 @@ private List runOrderByNullsLastQuery(RestClient queryClient) throws IO indexDocs.setJsonEntity(bulk.toString()); 
client().performRequest(indexDocs); - Request query = new Request("GET", "_sql"); - query.setJsonEntity("{\"query\":\"SELECT int FROM test GROUP BY 1 ORDER BY 1 NULLS LAST\"}"); + Request query = new Request("POST", "_sql"); + query.setJsonEntity(sqlQueryEntityWithOptionalMode("SELECT int FROM test GROUP BY 1 ORDER BY 1 NULLS LAST", bwcVersion)); Response queryResponse = queryClient.performRequest(query); assertEquals(200, queryResponse.getStatusLine().getStatusCode()); @@ -135,4 +138,21 @@ private List runOrderByNullsLastQuery(RestClient queryClient) throws IO return rows.stream().map(row -> (Integer) row.get(0)).collect(Collectors.toList()); } + public static String sqlQueryEntityWithOptionalMode(String query, Version bwcVersion) throws IOException { + XContentBuilder json = XContentFactory.jsonBuilder().startObject(); + json.field("query", query); + if (bwcVersion.before(Version.V_7_12_0)) { + // a bug previous to 7.12 caused a NullPointerException when accessing displaySize in ColumnInfo. 
The bug has been addressed in + // https://github.com/elastic/elasticsearch/pull/68802/files + // #diff-2faa4e2df98a4636300a19d9d890a1bd7174e9b20dd3a8589d2c78a3d9e5cbc0L110 + // as a workaround, use JDBC (driver) mode in versions prior to 7.12 + json.field("mode", "jdbc"); + json.field("binary_format", false); + json.field("version", bwcVersion.toString()); + } + json.endObject(); + + return Strings.toString(json); + } + } diff --git a/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java b/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java index 284a758a508e2..026da06c46eba 100644 --- a/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java +++ b/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java @@ -38,7 +38,6 @@ import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.xpack.ql.TestUtils.buildNodeAndVersions; import static org.elasticsearch.xpack.ql.TestUtils.readResource; -import static org.elasticsearch.xpack.ql.execution.search.QlSourceBuilder.SWITCH_TO_FIELDS_API_VERSION; public class SqlSearchIT extends ESRestTestCase { @@ -56,9 +55,7 @@ public class SqlSearchIT extends ESRestTestCase { private static List newNodes; private static List bwcNodes; private static Version bwcVersion; - private static Version newVersion; private static boolean isBwcNodeBeforeFieldsApiInQL; - private static boolean isBwcNodeBeforeFieldsApiInES; @Before public void createIndex() throws IOException { @@ -68,9 +65,7 @@ public void createIndex() throws IOException { newNodes = new ArrayList<>(nodes.getNewNodes()); bwcNodes = new ArrayList<>(nodes.getBWCNodes()); bwcVersion = nodes.getBWCNodes().get(0).getVersion(); - newVersion = nodes.getNewNodes().get(0).getVersion(); isBwcNodeBeforeFieldsApiInQL = 
bwcVersion.before(FIELDS_API_QL_INTRODUCTION); - isBwcNodeBeforeFieldsApiInES = bwcVersion.before(SWITCH_TO_FIELDS_API_VERSION); String mappings = readResource(SqlSearchIT.class.getResourceAsStream("/all_field_types.json")); createIndex( @@ -142,7 +137,7 @@ public void testAllTypesWithRequestToUpgradedNodes() throws Exception { (builder, fieldValues) -> { Float randomFloat = randomFloat(); builder.append(","); - if (isBwcNodeBeforeFieldsApiInQL && isBwcNodeBeforeFieldsApiInES) { + if (isBwcNodeBeforeFieldsApiInQL) { builder.append("\"geo_point_field\":{\"lat\":\"37.386483\", \"lon\":\"-122.083843\"},"); fieldValues.put("geo_point_field", "POINT (-122.08384302444756 37.38648299127817)"); builder.append("\"float_field\":" + randomFloat + ","); @@ -256,20 +251,38 @@ private void assertAllTypesWithNodes(Map expectedResponse, List< ) { @SuppressWarnings("unchecked") List> columns = (List>) expectedResponse.get("columns"); + String intervalYearMonth = "INTERVAL '150' YEAR AS interval_year, "; String intervalDayTime = "INTERVAL '163' MINUTE AS interval_minute, "; - // get all fields names from the expected response built earlier, skipping the intervals as they execute locally // and not taken from the index itself - String fieldsList = columns.stream().map(m -> (String) m.get("name")).filter(str -> str.startsWith("interval") == false) - .collect(Collectors.toList()).stream().collect(Collectors.joining(", ")); + String fieldsList = columns.stream() + .map(m -> (String) m.get("name")) + .filter(str -> str.startsWith("interval") == false) + .collect(Collectors.toList()) + .stream() + .collect(Collectors.joining(", ")); String query = "SELECT " + intervalYearMonth + intervalDayTime + fieldsList + " FROM " + index + " ORDER BY id"; + Request request = new Request("POST", "_sql"); - request.setJsonEntity("{\"query\":\"" + query + "\"}"); - assertBusy(() -> { assertResponse(expectedResponse, runSql(client, request)); }); + 
request.setJsonEntity(SqlCompatIT.sqlQueryEntityWithOptionalMode(query, bwcVersion)); + assertBusy(() -> { + assertResponse(expectedResponse, dropDisplaySizes(runSql(client, request))); + }); } } + private Map dropDisplaySizes(Map response) { + // in JDBC mode, display_size will be part of the response, so remove it because it's not part of the expected response + @SuppressWarnings("unchecked") + List> columns = (List>) response.get("columns"); + List> columnsWithoutDisplaySizes = columns.stream() + .peek(column -> column.remove("display_size")) + .collect(Collectors.toList()); + response.put("columns", columnsWithoutDisplaySizes); + return response; + } + private void assertResponse(Map expected, Map actual) { if (false == expected.equals(actual)) { NotEqualMessageBuilder message = new NotEqualMessageBuilder(); diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java index bfbd110688eb9..c7780841bf416 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java @@ -6,19 +6,14 @@ */ package org.elasticsearch.xpack.sql.action; -import java.io.IOException; -import java.time.ZonedDateTime; -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; - +import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.ql.async.QlStatusResponse; 
import org.elasticsearch.xpack.sql.proto.ColumnInfo; import org.elasticsearch.xpack.sql.proto.Mode; @@ -26,6 +21,12 @@ import org.elasticsearch.xpack.sql.proto.SqlVersion; import org.elasticsearch.xpack.sql.proto.StringUtils; +import java.io.IOException; +import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + import static java.util.Collections.unmodifiableList; import static org.elasticsearch.Version.CURRENT; import static org.elasticsearch.xpack.sql.action.AbstractSqlQueryRequest.CURSOR; @@ -81,10 +82,16 @@ public SqlQueryResponse(StreamInput in) throws IOException { } } this.rows = unmodifiableList(rows); - columnar = in.readBoolean(); - asyncExecutionId = in.readOptionalString(); - isPartial = in.readBoolean(); - isRunning = in.readBoolean(); + if (in.getVersion().onOrAfter(Version.V_7_14_0)) { + columnar = in.readBoolean(); + asyncExecutionId = in.readOptionalString(); + isPartial = in.readBoolean(); + isRunning = in.readBoolean(); + } else { + asyncExecutionId = null; + isPartial = false; + isRunning = false; + } } public SqlQueryResponse( diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml index 6acc35392a53d..3a4b18dfe1200 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml @@ -6,7 +6,7 @@ setup: - do: allowed_warnings: - - "index template [tsdbds-template1] has index patterns [simple-data-stream1] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template1] will take precedence during new index creation" + - "index template [my-template1] has index patterns [k8s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template1] 
will take precedence during new index creation" indices.put_index_template: name: my-template1 body: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/terms_enum/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/terms_enum/10_basic.yml index 7fdfd4ec55511..9e6adf86b63ec 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/terms_enum/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/terms_enum/10_basic.yml @@ -1,8 +1,8 @@ --- setup: - skip: - version: " - 7.9.99" - reason: TODO temporary version skip while we wait for backport to 7.x of search_after properties + version: " - 7.15.0" + reason: original indices are propagated correctly in 7.15.1 features: headers - do: cluster.health: @@ -38,6 +38,16 @@ setup: ] } + - do: + security.put_role: + name: "dls_alias_role" + body: > + { + "indices": [ + { "names": ["alias_security"], "privileges": ["read"], "query": "{\"term\": {\"ck\": \"const\"}}" } + ] + } + - do: security.put_role: name: "dls_none_role" @@ -57,6 +67,16 @@ setup: "full_name" : "user with access to all docs in test_security index (using DLS)" } + - do: + security.put_user: + username: "dls_alias_user" + body: > + { + "password" : "x-pack-test-password", + "roles" : [ "dls_alias_role" ], + "full_name" : "user with access to all docs in test_security index (using DLS)" + } + - do: security.put_role: name: "dls_some_role" @@ -143,6 +163,8 @@ setup: indices.create: index: test_security body: + aliases: + alias_security: {} settings: index: number_of_shards: 1 @@ -198,6 +220,16 @@ teardown: security.delete_role: name: "dls_all_role" ignore: 404 + + - do: + security.delete_user: + username: "dls_alias_user" + ignore: 404 + + - do: + security.delete_role: + name: "dls_alias_role" + ignore: 404 - do: security.delete_role: name: "dls_none_role" @@ -289,6 +321,7 @@ teardown: index: test_k body: {"field": "foo"} - length: {terms: 1} + --- "Test search after keyword 
field": - do: @@ -389,6 +422,7 @@ teardown: terms_enum: index: test_* body: {"field": "foo", "string":"b", "timeout": "2m"} + --- "Test security": @@ -406,6 +440,13 @@ teardown: body: {"field": "foo", "string":"b"} - length: {terms: 1} + - do: + headers: { Authorization: "Basic ZGxzX2FsaWFzX3VzZXI6eC1wYWNrLXRlc3QtcGFzc3dvcmQ=" } # dls_alias_user sees all docs through the alias + terms_enum: + index: alias_security + body: { "field": "foo", "string": "b" } + - length: { terms: 1 } + - do: headers: { Authorization: "Basic ZGxzX3NvbWVfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" } # dls_some_user sees selected docs terms_enum: diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java index 3c68c33d44d2a..0707a0a9d69aa 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java @@ -65,10 +65,10 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.rest.ESRestTestCase; -import org.joda.time.Instant; import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.time.Instant; import java.time.ZoneId; import java.util.Base64; import java.util.Collections; diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index 73015e9df90da..0b420d59b5342 100644 --- 
a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -23,11 +23,11 @@ import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; -import org.joda.time.Instant; import org.junit.After; import org.junit.AfterClass; import java.io.IOException; +import java.time.Instant; import java.util.Arrays; import java.util.Collections; import java.util.List; diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 0cd73be5d108a..28eab3e26cf36 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -45,6 +45,7 @@ import org.elasticsearch.xpack.core.action.DeleteDataStreamAction; import org.elasticsearch.xpack.core.action.GetDataStreamAction; import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.core.watcher.WatcherConstants; import org.elasticsearch.xpack.core.watcher.WatcherState; import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.execution.TriggeredWatchStoreField; @@ -518,7 +519,7 @@ protected void startWatcher() throws Exception { protected void ensureLicenseEnabled() throws Exception { assertBusy(() -> { for (XPackLicenseState licenseState : 
internalCluster().getInstances(XPackLicenseState.class)) { - assertThat(licenseState.checkFeature(XPackLicenseState.Feature.WATCHER), is(true)); + assertThat(WatcherConstants.WATCHER_FEATURE.check(licenseState), is(true)); } }); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherInfoTransportAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherInfoTransportAction.java index 448c7fd85b148..460ae27a457ec 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherInfoTransportAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherInfoTransportAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackInfoFeatureTransportAction; +import org.elasticsearch.xpack.core.watcher.WatcherConstants; public class WatcherInfoTransportAction extends XPackInfoFeatureTransportAction { @@ -36,7 +37,7 @@ public String name() { @Override public boolean available() { - return licenseState.isAllowed(XPackLicenseState.Feature.WATCHER); + return WatcherConstants.WATCHER_FEATURE.checkWithoutTracking(licenseState); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java index dccbeda01c89c..4dfd927464dcd 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.XPackLicenseState; -import 
org.elasticsearch.license.XPackLicenseState.Feature; import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -25,6 +24,7 @@ import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; +import org.elasticsearch.xpack.core.watcher.WatcherConstants; import org.elasticsearch.xpack.core.watcher.WatcherFeatureSetUsage; import org.elasticsearch.xpack.core.watcher.common.stats.Counters; import org.elasticsearch.xpack.core.watcher.transport.actions.stats.WatcherStatsAction; @@ -70,14 +70,14 @@ protected void masterOperation(Task task, XPackUsageRequest request, ClusterStat .filter(Objects::nonNull) .collect(Collectors.toList()); Counters mergedCounters = Counters.merge(countersPerNode); - WatcherFeatureSetUsage usage = - new WatcherFeatureSetUsage(licenseState.isAllowed(Feature.WATCHER), true, mergedCounters.toNestedMap()); + WatcherFeatureSetUsage usage = new WatcherFeatureSetUsage( + WatcherConstants.WATCHER_FEATURE.checkWithoutTracking(licenseState), true, mergedCounters.toNestedMap()); listener.onResponse(new XPackUsageFeatureResponse(usage)); }, listener::onFailure)); } } else { - WatcherFeatureSetUsage usage = - new WatcherFeatureSetUsage(licenseState.isAllowed(Feature.WATCHER), false, Collections.emptyMap()); + WatcherFeatureSetUsage usage = new WatcherFeatureSetUsage( + WatcherConstants.WATCHER_FEATURE.checkWithoutTracking(licenseState), false, Collections.emptyMap()); listener.onResponse(new XPackUsageFeatureResponse(usage)); } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java index a2452a57cd47d..e2333e8d09b3b 100644 --- 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.core.watcher.WatcherConstants; abstract class WatcherTransportAction extends HandledTransportAction { @@ -36,7 +37,7 @@ protected String executor() { @Override protected final void doExecute(Task task, final Request request, ActionListener listener) { - if (licenseState.checkFeature(XPackLicenseState.Feature.WATCHER)) { + if (WatcherConstants.WATCHER_FEATURE.check(licenseState)) { doExecute(request, listener); } else { listener.onFailure(LicenseUtils.newComplianceException(XPackField.WATCHER)); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherInfoTransportActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherInfoTransportActionTests.java index f7668972bf3d3..d7043fc71ba7a 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherInfoTransportActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherInfoTransportActionTests.java @@ -20,13 +20,14 @@ import org.elasticsearch.common.xcontent.ObjectPath; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackFeatureSet; import 
org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; +import org.elasticsearch.xpack.core.watcher.WatcherConstants; import org.elasticsearch.xpack.core.watcher.WatcherFeatureSetUsage; import org.elasticsearch.xpack.core.watcher.WatcherMetadata; import org.elasticsearch.xpack.core.watcher.common.stats.Counters; @@ -52,12 +53,12 @@ public class WatcherInfoTransportActionTests extends ESTestCase { - private XPackLicenseState licenseState; + private MockLicenseState licenseState; private Client client; @Before public void init() throws Exception { - licenseState = mock(XPackLicenseState.class); + licenseState = mock(MockLicenseState.class); client = mock(Client.class); ThreadPool threadPool = mock(ThreadPool.class); ThreadContext threadContext = new ThreadContext(Settings.EMPTY); @@ -69,7 +70,7 @@ public void testAvailable() { WatcherInfoTransportAction featureSet = new WatcherInfoTransportAction( mock(TransportService.class), mock(ActionFilters.class), Settings.EMPTY, licenseState); boolean available = randomBoolean(); - when(licenseState.isAllowed(XPackLicenseState.Feature.WATCHER)).thenReturn(available); + when(licenseState.isAllowed(WatcherConstants.WATCHER_FEATURE)).thenReturn(available); assertThat(featureSet.available(), is(available)); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailActionTests.java index 7fe8c83136144..5783aa6cdb38c 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailActionTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; 
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -211,7 +212,7 @@ public void testParser() throws Exception { .field("from", "from@domain") .field("priority", priority.name()); if (dataAttachment != null) { - builder.field("attach_data", dataAttachment); + builder.field("attach_data", (ToXContentObject) dataAttachment); } else if (randomBoolean()) { dataAttachment = org.elasticsearch.xpack.watcher.notification.email.DataAttachment.DEFAULT; builder.field("attach_data", true); diff --git a/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/120_terms_enum.yml b/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/120_terms_enum.yml index c8b3be89b7d24..107cb488e8034 100644 --- a/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/120_terms_enum.yml +++ b/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/120_terms_enum.yml @@ -21,6 +21,20 @@ setup: ] } + - do: + security.put_role: + name: "terms_enum_alias_role" + body: > + { + "cluster": ["all"], + "indices": [ + { + "names": ["my_remote_cluster:terms_enum_alias"], + "privileges": ["read"] + } + ] + } + - do: security.put_user: username: "joe_all" @@ -30,6 +44,15 @@ setup: "roles" : [ "terms_enum_all_role" ] } + - do: + security.put_user: + username: "joe_alias" + body: > + { + "password": "s3krit-password", + "roles" : [ "terms_enum_alias_role" ] + } + - do: security.put_role: name: "terms_enum_none_role" @@ -82,6 +105,10 @@ teardown: security.delete_user: username: "joe_all" ignore: 404 + - do: + security.delete_user: + username: "joe_alias" + ignore: 404 - do: security.delete_user: username: "joe_none" @@ -94,6 +121,10 @@ teardown: security.delete_role: name: "terms_enum_all_role" ignore: 404 + - do: + security.delete_role: + 
name: "terms_enum_alias_role" + ignore: 404 - do: security.delete_role: name: "terms_enum_none_role" @@ -123,6 +154,15 @@ teardown: - match: { terms.0: "zar" } - match: { complete: true } + - do: + headers: { Authorization: "Basic am9lX2FsaWFzOnMza3JpdC1wYXNzd29yZA==" } # joe_alias can see all docs through alias + terms_enum: + index: my_remote_cluster:terms_enum_alias + body: { "field": "foo", "search_after": "foobar" } + - length: { terms: 1 } + - match: { terms.0: "zar" } + - match: { complete: true } + - do: headers: { Authorization: "Basic am9lX25vbmU6czNrcml0LXBhc3N3b3Jk" } # joe_none can't see docs terms_enum: diff --git a/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/remote_cluster/10_basic.yml b/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/remote_cluster/10_basic.yml index b8feed5c68d7f..4f6fb2c388f03 100644 --- a/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/remote_cluster/10_basic.yml +++ b/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/remote_cluster/10_basic.yml @@ -79,6 +79,17 @@ setup: ] } + - do: + security.put_role: + name: "terms_enum_alias_role" + body: > + { + "cluster": ["monitor"], + "indices": [ + { "names": ["terms_enum_alias"], "privileges": ["read"], "query": "{\"term\": {\"ck\": \"const\"}}" } + ] + } + - do: security.put_role: name: "terms_enum_none_role" @@ -373,6 +384,8 @@ setup: indices.create: index: terms_enum_index body: + aliases: + terms_enum_alias: {} settings: index: number_of_shards: 1