From 3d86288edb90606eeca43ea4636a41e4397dd77f Mon Sep 17 00:00:00 2001
From: Zachary Tong
Date: Mon, 6 May 2019 17:17:11 -0400
Subject: [PATCH 1/7] Force selection of calendar or fixed intervals in date histo agg (#33727)

The date_histogram aggregation accepts an interval which can be either a
calendar interval (DST-aware, leap seconds, arbitrary length of months,
etc.) or a fixed interval (strict multiples of SI units). Unfortunately
the type is inferred by first trying to parse the value as a calendar
interval, then falling back to fixed if that fails. This leads to a
confusing arrangement where `1d` == calendar but `2d` == fixed, and if
you want a day of fixed time you have to specify `24h` (i.e. the next
smallest unit). This arrangement is very error-prone for users.

This PR adds `calendar_interval` and `fixed_interval` parameters to any
code that uses intervals (date_histogram, rollup, composite, datafeed,
etc.). Calendar only accepts calendar intervals, fixed accepts any
combination of units (meaning `1d` can be used to specify `24h` in fixed
time), and the two are mutually exclusive. The old `interval` behavior
is deprecated and now emits a deprecation warning; it is also mutually
exclusive with the two new parameters. In the future the old
dual-purpose `interval` will be removed.

The change applies to both the REST and Java clients.
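As an illustration of the resulting Java client surface (a sketch built
from the builder methods this patch touches; the aggregation names and
the "timestamp" field are made up for the example):

    import org.elasticsearch.search.aggregations.AggregationBuilder;
    import org.elasticsearch.search.aggregations.AggregationBuilders;
    import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

    class IntervalSelectionExample {
        static AggregationBuilder calendarDay() {
            // Calendar-aware day: respects DST transitions, leap adjustments, etc.
            return AggregationBuilders.dateHistogram("by_day")
                .field("timestamp")
                .calendarInterval(DateHistogramInterval.DAY);
        }

        static AggregationBuilder fixedDay() {
            // Fixed day: strict SI multiples, so a fixed "1d" (== "24h") can now
            // be expressed directly instead of falling back to "24h".
            return AggregationBuilders.dateHistogram("by_day_fixed")
                .field("timestamp")
                .fixedInterval(new DateHistogramInterval("24h"));
        }
    }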
---
 .../job/config/DateHistogramGroupConfig.java  | 137 +++-
 .../org/elasticsearch/client/RollupIT.java    |  12 +-
 .../documentation/RollupDocumentationIT.java  |  14 +-
 .../ml/datafeed/DatafeedConfigTests.java      |   3 +-
 .../rollup/GetRollupJobResponseTests.java     |   2 +-
 .../rollup/PutRollupJobRequestTests.java      |   2 +-
 .../config/DateHistogramGroupConfigTests.java |  16 +-
 docs/build.gradle                             |   6 +-
 .../bucket/datehistogram-aggregation.asciidoc |   4 +-
 docs/java-api/aggs.asciidoc                   |   2 +-
 docs/java-api/search.asciidoc                 |   2 +-
 .../high-level/rollup/put_job.asciidoc        |   2 +-
 .../bucket/composite-aggregation.asciidoc     |  16 +-
 .../bucket/datehistogram-aggregation.asciidoc |  16 +-
 docs/reference/aggregations/misc.asciidoc     |   2 +-
 docs/reference/aggregations/pipeline.asciidoc |   8 +-
 .../pipeline/avg-bucket-aggregation.asciidoc  |   2 +-
 .../bucket-script-aggregation.asciidoc        |   2 +-
 .../bucket-selector-aggregation.asciidoc      |   2 +-
 .../pipeline/bucket-sort-aggregation.asciidoc |   4 +-
 .../cumulative-sum-aggregation.asciidoc       |   2 +-
 .../pipeline/derivative-aggregation.asciidoc  |   6 +-
 ...extended-stats-bucket-aggregation.asciidoc |   2 +-
 .../pipeline/max-bucket-aggregation.asciidoc  |   2 +-
 .../pipeline/min-bucket-aggregation.asciidoc  |   2 +-
 .../pipeline/movfn-aggregation.asciidoc       |  22 +-
 .../percentiles-bucket-aggregation.asciidoc   |   2 +-
 .../pipeline/serial-diff-aggregation.asciidoc |   2 +-
 .../stats-bucket-aggregation.asciidoc         |   2 +-
 .../pipeline/sum-bucket-aggregation.asciidoc  |   2 +-
 docs/reference/ml/aggregations.asciidoc       |   4 +-
 docs/reference/rollup/apis/get-job.asciidoc   |   8 +-
 docs/reference/rollup/apis/put-job.asciidoc   |   2 +-
 .../rollup/apis/rollup-caps.asciidoc          |   4 +-
 .../rollup/apis/rollup-index-caps.asciidoc    |   4 +-
 .../rollup/apis/rollup-job-config.asciidoc    |   4 +-
 .../rollup/apis/rollup-search.asciidoc        |   2 +-
 .../rollup/rollup-getting-started.asciidoc    |   4 +-
 .../rollup/understanding-groups.asciidoc      |   8 +-
 .../test/painless/70_mov_fn_agg.yml           |  18 +-
 .../org/elasticsearch/search/CCSDuelIT.java   |   4 +-
 .../test/search.aggregation/230_composite.yml |  70 +-
 .../search.aggregation/240_max_buckets.yml    |  30 +-
 .../test/search.aggregation/250_moving_fn.yml |  36 +-
 .../test/search.aggregation/80_typed_keys.yml |  13 +-
 .../test/search/240_date_nanos.yml            |   5 +-
 .../DateHistogramValuesSourceBuilder.java     | 148 ++--
 .../DateHistogramAggregationBuilder.java      | 180 ++---
 .../histogram/DateHistogramInterval.java      |  20 +
 .../histogram/DateIntervalConsumer.java       |  40 +
 .../bucket/histogram/DateIntervalWrapper.java | 423 ++++++++++
 .../AggregatorFactoriesTests.java             |   4 +-
 .../search/aggregations/MissingValueIT.java   |   4 +-
 .../aggregations/bucket/DateHistogramIT.java  |  21 +-
 .../CompositeAggregationBuilderTests.java     |   5 +-
 .../composite/CompositeAggregatorTests.java   |  11 +
 ...egacyIntervalCompositeAggBuilderTests.java | 155 ++++
 .../DateHistogramAggregatorTests.java         | 730 +++++++++++++++++-
 .../bucket/histogram/DateHistogramTests.java  |  25 +-
 .../histogram/DateIntervalWrapperTests.java   | 127 +++
 .../pipeline/AvgBucketAggregatorTests.java    |   2 +-
 .../CumulativeSumAggregatorTests.java         |   5 +-
 .../aggregations/pipeline/MovFnUnitTests.java |   2 +-
 .../ml/datafeed/extractor/ExtractorUtils.java |  11 +-
 .../core/rollup/action/RollupJobCaps.java     |   3 +-
 .../rollup/job/DateHistogramGroupConfig.java  | 148 +++-
 .../xpack/core/rollup/job/GroupConfig.java    |   2 +-
 .../GetDatafeedsActionResponseTests.java      |   2 +
 .../core/ml/datafeed/DatafeedConfigTests.java |  21 +-
 .../core/ml/datafeed/DatafeedUpdateTests.java |  16 +
 .../extractor/ExtractorUtilsTests.java        |  16 +-
 .../xpack/core/rollup/ConfigTestHelpers.java  |  28 +-
 ...eHistogramGroupConfigSerializingTests.java |  84 +-
 .../integration/DataFramePivotRestIT.java     |  14 +-
 .../integration/DataFrameRestTestCase.java    |  12 +-
 .../transforms/pivot/PivotTests.java          |  10 +
 .../ml/integration/DatafeedJobsRestIT.java    |  24 +-
 .../TransportGetOverallBucketsAction.java     |   3 +-
 .../DatafeedDelayedDataDetector.java          |   4 +-
 .../RollupDataExtractorFactory.java           |  11 +-
 .../extractor/DataExtractorFactoryTests.java  |  13 +-
 .../rollup/RollupJobIdentifierUtils.java      | 168 ++--
 .../xpack/rollup/RollupRequestTranslator.java |  17 +-
 .../xpack/rollup/job/RollupIndexer.java       |   8 +-
 .../rollup/RollupJobIdentifierUtilTests.java  | 186 +++--
 .../rollup/RollupRequestTranslationTests.java |  69 +-
 .../RollupResponseTranslationTests.java       |  31 +-
 .../rollup/action/SearchActionTests.java      |  50 +-
 .../xpack/rollup/config/ConfigTests.java      |  17 +-
 .../xpack/rollup/job/IndexerUtilsTests.java   |  10 +-
 .../job/RollupIndexerIndexingTests.java       |  14 +-
 .../test/data_frame/preview_transforms.yml    |   5 +
 .../rest-api-spec/test/ml/datafeeds_crud.yml  |   2 +-
 .../rest-api-spec/test/rollup/delete_job.yml  |   8 +-
 .../rest-api-spec/test/rollup/get_jobs.yml    |  12 +-
 .../test/rollup/get_rollup_caps.yml           |  20 +-
 .../test/rollup/get_rollup_index_caps.yml     |  40 +-
 .../rest-api-spec/test/rollup/put_job.yml     |  16 +-
 .../test/rollup/rollup_search.yml             |  71 +-
 .../rest-api-spec/test/rollup/start_job.yml   |   2 +-
 .../rest-api-spec/test/rollup/stop_job.yml    |   2 +-
 .../xpack/restart/FullClusterRestartIT.java   |   9 +-
 .../elasticsearch/multi_node/RollupIT.java    |   4 +-
 x-pack/qa/rolling-upgrade/build.gradle        |   5 +
 .../upgrades/RollupDateHistoUpgradeIT.java    | 258 +++++++
 .../mixed_cluster/40_ml_datafeed_crud.yml     |   8 +
 .../test/old_cluster/40_ml_datafeed_crud.yml  |  78 +-
 .../upgraded_cluster/40_ml_datafeed_crud.yml  |  10 +
 108 files changed, 3270 insertions(+), 688 deletions(-)
 create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalConsumer.java
 create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java
 create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/LegacyIntervalCompositeAggBuilderTests.java
 create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapperTests.java
 create mode 100644 x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java
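The composite aggregation source gets the same treatment; a sketch of
the new explicit setters on DateHistogramValuesSourceBuilder (the
"timestamp" field is illustrative; the setters come from the
DateIntervalWrapper/DateIntervalConsumer plumbing added below):

    import java.util.Collections;
    import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
    import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder;
    import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

    class CompositeIntervalExample {
        static CompositeAggregationBuilder daily() {
            // Equivalent of the JSON source:
            // { "date": { "date_histogram": { "field": "timestamp", "calendar_interval": "1d" } } }
            DateHistogramValuesSourceBuilder date = new DateHistogramValuesSourceBuilder("date")
                .field("timestamp")
                .calendarInterval(DateHistogramInterval.DAY);
            return new CompositeAggregationBuilder("my_buckets", Collections.singletonList(date));
        }
    }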

diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/job/config/DateHistogramGroupConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/job/config/DateHistogramGroupConfig.java
index 21a610f789460..e56b54766853a 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/job/config/DateHistogramGroupConfig.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/job/config/DateHistogramGroupConfig.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.client.ValidationException;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -30,8 +31,11 @@
 import org.joda.time.DateTimeZone;
 
 import java.io.IOException;
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.Objects;
 import java.util.Optional;
+import java.util.Set;
 
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
@@ -59,14 +63,63 @@ public class DateHistogramGroupConfig implements Validatable, ToXContentObject {
     private static final String TIME_ZONE = "time_zone";
     private static final String DELAY = "delay";
     private static final String DEFAULT_TIMEZONE = "UTC";
+    private static final String CALENDAR_INTERVAL = "calendar_interval";
+    private static final String FIXED_INTERVAL = "fixed_interval";
+
+    // From DateHistogramAggregationBuilder in core, transplanted and modified to a set
+    // so we don't need to import a dependency on the class
+    private static final Set<String> DATE_FIELD_UNITS;
+    static {
+        Set<String> dateFieldUnits = new HashSet<>();
+        dateFieldUnits.add("year");
+        dateFieldUnits.add("1y");
+        dateFieldUnits.add("quarter");
+        dateFieldUnits.add("1q");
+        dateFieldUnits.add("month");
+        dateFieldUnits.add("1M");
+        dateFieldUnits.add("week");
+        dateFieldUnits.add("1w");
+        dateFieldUnits.add("day");
+        dateFieldUnits.add("1d");
+        dateFieldUnits.add("hour");
+        dateFieldUnits.add("1h");
+        dateFieldUnits.add("minute");
+        dateFieldUnits.add("1m");
+        dateFieldUnits.add("second");
+        dateFieldUnits.add("1s");
+        DATE_FIELD_UNITS = Collections.unmodifiableSet(dateFieldUnits);
+    }
 
     private static final ConstructingObjectParser<DateHistogramGroupConfig, Void> PARSER;
     static {
-        PARSER = new ConstructingObjectParser<>(NAME, true, a ->
-            new DateHistogramGroupConfig((String) a[0], (DateHistogramInterval) a[1], (DateHistogramInterval) a[2], (String) a[3]));
+        PARSER = new ConstructingObjectParser<>(NAME, true, a -> {
+            DateHistogramInterval oldInterval = (DateHistogramInterval) a[1];
+            DateHistogramInterval calendarInterval = (DateHistogramInterval) a[2];
+            DateHistogramInterval fixedInterval = (DateHistogramInterval) a[3];
+
+            if (oldInterval != null) {
+                if (calendarInterval != null || fixedInterval != null) {
+                    throw new IllegalArgumentException("Cannot use [interval] with [fixed_interval] or [calendar_interval] " +
"configuration options."); + } + return new DateHistogramGroupConfig((String) a[0], oldInterval, (DateHistogramInterval) a[4], (String) a[5]); + } else if (calendarInterval != null && fixedInterval == null) { + return new CalendarInterval((String) a[0], calendarInterval, (DateHistogramInterval) a[4], (String) a[5]); + } else if (calendarInterval == null && fixedInterval != null) { + return new FixedInterval((String) a[0], fixedInterval, (DateHistogramInterval) a[4], (String) a[5]); + } else if (calendarInterval != null && fixedInterval != null) { + throw new IllegalArgumentException("Cannot set both [fixed_interval] and [calendar_interval] at the same time"); + } else { + throw new IllegalArgumentException("An interval is required. Use [fixed_interval] or [calendar_interval]."); + } + }); PARSER.declareString(constructorArg(), new ParseField(FIELD)); - PARSER.declareField(constructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(INTERVAL), ValueType.STRING); - PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(DELAY), ValueType.STRING); + PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(INTERVAL), ValueType.STRING); + PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), + new ParseField(CALENDAR_INTERVAL), ValueType.STRING); + PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), + new ParseField(FIXED_INTERVAL), ValueType.STRING); + PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(DELAY), ValueType.STRING); PARSER.declareString(optionalConstructorArg(), new ParseField(TIME_ZONE)); } @@ -75,9 +128,58 @@ public class DateHistogramGroupConfig implements Validatable, ToXContentObject { private final DateHistogramInterval delay; private final String timeZone; + /** + * FixedInterval is a {@link DateHistogramGroupConfig} that uses a fixed time interval for rolling up data. + * The fixed time interval is one or multiples of SI units and has no calendar-awareness (e.g. doesn't account + * for leap corrections, does not have variable length months, etc). + * + * For calendar-aware rollups, use {@link CalendarInterval} + */ + public static class FixedInterval extends DateHistogramGroupConfig { + public FixedInterval(String field, DateHistogramInterval interval) { + this(field, interval, null, null); + } + + public FixedInterval(String field, DateHistogramInterval interval, DateHistogramInterval delay, String timeZone) { + super(field, interval, delay, timeZone); + // validate fixed time + TimeValue.parseTimeValue(interval.toString(), NAME + ".FixedInterval"); + } + } + + /** + * CalendarInterval is a {@link DateHistogramGroupConfig} that uses calendar-aware intervals for rolling up data. + * Calendar time intervals understand leap corrections and contextual differences in certain calendar units (e.g. + * months are variable length depending on the month). 
+     * 1s, 1m, 1h, 1d, 1w, 1q, 1M, 1y
+     *
+     * For fixed time rollups, use {@link FixedInterval}
+     */
+    public static class CalendarInterval extends DateHistogramGroupConfig {
+        public CalendarInterval(String field, DateHistogramInterval interval) {
+            this(field, interval, null, null);
+
+        }
+
+        public CalendarInterval(String field, DateHistogramInterval interval, DateHistogramInterval delay, String timeZone) {
+            super(field, interval, delay, timeZone);
+            if (DATE_FIELD_UNITS.contains(interval.toString()) == false) {
+                throw new IllegalArgumentException("The supplied interval [" + interval + "] could not be parsed " +
+                    "as a calendar interval.");
+            }
+        }
+
+    }
+
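+    // Usage sketch (editorial illustration, not a line of this patch): with the
+    // two subclasses above, callers pick the interval type explicitly instead of
+    // going through the deprecated, type-inferring constructors that follow, e.g.
+    //
+    //   DateHistogramGroupConfig calendar =
+    //       new DateHistogramGroupConfig.CalendarInterval("timestamp", DateHistogramInterval.DAY);
+    //   DateHistogramGroupConfig fixed =
+    //       new DateHistogramGroupConfig.FixedInterval("timestamp", new DateHistogramInterval("90m"));
+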
     /**
      * Create a new {@link DateHistogramGroupConfig} using the given field and interval parameters.
+     *
+     * @deprecated Build a DateHistoConfig using {@link DateHistogramGroupConfig.CalendarInterval}
+     * or {@link DateHistogramGroupConfig.FixedInterval} instead
+     *
+     * @since 7.2.0
      */
+    @Deprecated
     public DateHistogramGroupConfig(final String field, final DateHistogramInterval interval) {
         this(field, interval, null, null);
     }
@@ -85,17 +187,22 @@ public DateHistogramGroupConfig(final String field, final DateHistogramInterval
     /**
      * Create a new {@link DateHistogramGroupConfig} using the given configuration parameters.
      * <p>
-     *     The {@code field} and {@code interval} are required to compute the date histogram for the rolled up documents.
-     *     The {@code delay} is optional and can be set to {@code null}. It defines how long to wait before rolling up new documents.
-     *     The {@code timeZone} is optional and can be set to {@code null}. When configured, the time zone value is resolved using
-     *     ({@link DateTimeZone#forID(String)} and must match a time zone identifier provided by the Joda Time library.
+     * The {@code field} and {@code interval} are required to compute the date histogram for the rolled up documents.
+     * The {@code delay} is optional and can be set to {@code null}. It defines how long to wait before rolling up new documents.
+     * The {@code timeZone} is optional and can be set to {@code null}. When configured, the time zone value is resolved using
+     * ({@link DateTimeZone#forID(String)} and must match a time zone identifier provided by the Joda Time library.
      * </p>
-     *
-     * @param field the name of the date field to use for the date histogram (required)
+     * @param field    the name of the date field to use for the date histogram (required)
      * @param interval the interval to use for the date histogram (required)
-     * @param delay the time delay (optional)
+     * @param delay    the time delay (optional)
      * @param timeZone the id of time zone to use to calculate the date histogram (optional). When {@code null}, the UTC timezone is used.
+     *
+     * @deprecated Build a DateHistoConfig using {@link DateHistogramGroupConfig.CalendarInterval}
+     * or {@link DateHistogramGroupConfig.FixedInterval} instead
+     *
+     * @since 7.2.0
      */
+    @Deprecated
     public DateHistogramGroupConfig(final String field,
                                     final DateHistogramInterval interval,
                                     final @Nullable DateHistogramInterval delay,
@@ -153,7 +260,13 @@ public String getTimeZone() {
     public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
         builder.startObject();
         {
-            builder.field(INTERVAL, interval.toString());
+            if (this.getClass().equals(CalendarInterval.class)) {
+                builder.field(CALENDAR_INTERVAL, interval.toString());
+            } else if (this.getClass().equals(FixedInterval.class)) {
+                builder.field(FIXED_INTERVAL, interval.toString());
+            } else {
+                builder.field(INTERVAL, interval.toString());
+            }
             builder.field(FIELD, field);
             if (delay != null) {
                 builder.field(DELAY, delay.toString());
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java
index d876ce6ed5fb3..db77d76b79389 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java
@@ -152,7 +152,7 @@ public int indexDocs() throws Exception {
 
     public void testDeleteRollupJob() throws Exception {
-        final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig("date", DateHistogramInterval.DAY));
+        final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("date", DateHistogramInterval.DAY));
         final List<MetricConfig> metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS));
         final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(30, 600));
         PutRollupJobRequest putRollupJobRequest =
@@ -174,7 +174,7 @@ public void testDeleteMissingRollupJob() {
 
     public void testPutStartAndGetRollupJob() throws Exception {
         // TODO expand this to also test with histogram and terms?
-        final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig("date", DateHistogramInterval.DAY));
+        final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("date", DateHistogramInterval.DAY));
         final List<MetricConfig> metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS));
         final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(30, 600));
 
@@ -334,7 +334,7 @@ public void testGetRollupCaps() throws Exception {
         final String cron = "*/1 * * * * ?";
         final int pageSize = randomIntBetween(numDocs, numDocs * 10);
         // TODO expand this to also test with histogram and terms?
- final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig("date", DateHistogramInterval.DAY)); + final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("date", DateHistogramInterval.DAY)); final List metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS)); final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(30, 600)); @@ -378,7 +378,7 @@ public void testGetRollupCaps() throws Exception { case "delay": assertThat(entry.getValue(), equalTo("foo")); break; - case "interval": + case "calendar_interval": assertThat(entry.getValue(), equalTo("1d")); break; case "time_zone": @@ -446,7 +446,7 @@ public void testGetRollupIndexCaps() throws Exception { final String cron = "*/1 * * * * ?"; final int pageSize = randomIntBetween(numDocs, numDocs * 10); // TODO expand this to also test with histogram and terms? - final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig("date", DateHistogramInterval.DAY)); + final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("date", DateHistogramInterval.DAY)); final List metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS)); final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(30, 600)); @@ -490,7 +490,7 @@ public void testGetRollupIndexCaps() throws Exception { case "delay": assertThat(entry.getValue(), equalTo("foo")); break; - case "interval": + case "calendar_interval": assertThat(entry.getValue(), equalTo("1d")); break; case "time_zone": diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java index 8125c2f41f4c9..2a1c98f0c3596 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java @@ -399,8 +399,8 @@ public void onFailure(Exception e) { public void testGetRollupCaps() throws Exception { RestHighLevelClient client = highLevelClient(); - DateHistogramGroupConfig dateHistogram = - new DateHistogramGroupConfig("timestamp", DateHistogramInterval.HOUR, new DateHistogramInterval("7d"), "UTC"); // <1> + DateHistogramGroupConfig dateHistogram = new DateHistogramGroupConfig.FixedInterval( + "timestamp", DateHistogramInterval.HOUR, new DateHistogramInterval("7d"), "UTC"); // <1> TermsGroupConfig terms = new TermsGroupConfig("hostname", "datacenter"); HistogramGroupConfig histogram = new HistogramGroupConfig(5L, "load", "net_in", "net_out"); GroupConfig groups = new GroupConfig(dateHistogram, histogram, terms); @@ -473,7 +473,8 @@ public void testGetRollupCaps() throws Exception { // item represents a different aggregation that can be run against the "timestamp" // field, and any additional details specific to that agg (interval, etc) List> timestampCaps = fieldCaps.get("timestamp").getAggs(); - assert timestampCaps.get(0).toString().equals("{agg=date_histogram, delay=7d, interval=1h, time_zone=UTC}"); + logger.error(timestampCaps.get(0).toString()); + assert timestampCaps.get(0).toString().equals("{agg=date_histogram, fixed_interval=1h, delay=7d, time_zone=UTC}"); // In contrast to the timestamp field, the temperature field has multiple aggs configured List> temperatureCaps = fieldCaps.get("temperature").getAggs(); @@ -515,8 +516,8 @@ public void 
onFailure(Exception e) { public void testGetRollupIndexCaps() throws Exception { RestHighLevelClient client = highLevelClient(); - DateHistogramGroupConfig dateHistogram = - new DateHistogramGroupConfig("timestamp", DateHistogramInterval.HOUR, new DateHistogramInterval("7d"), "UTC"); // <1> + DateHistogramGroupConfig dateHistogram = new DateHistogramGroupConfig.FixedInterval( + "timestamp", DateHistogramInterval.HOUR, new DateHistogramInterval("7d"), "UTC"); // <1> TermsGroupConfig terms = new TermsGroupConfig("hostname", "datacenter"); HistogramGroupConfig histogram = new HistogramGroupConfig(5L, "load", "net_in", "net_out"); GroupConfig groups = new GroupConfig(dateHistogram, histogram, terms); @@ -587,7 +588,8 @@ public void testGetRollupIndexCaps() throws Exception { // item represents a different aggregation that can be run against the "timestamp" // field, and any additional details specific to that agg (interval, etc) List> timestampCaps = fieldCaps.get("timestamp").getAggs(); - assert timestampCaps.get(0).toString().equals("{agg=date_histogram, delay=7d, interval=1h, time_zone=UTC}"); + logger.error(timestampCaps.get(0).toString()); + assert timestampCaps.get(0).toString().equals("{agg=date_histogram, fixed_interval=1h, delay=7d, time_zone=UTC}"); // In contrast to the timestamp field, the temperature field has multiple aggs configured List> temperatureCaps = fieldCaps.get("temperature").getAggs(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedConfigTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedConfigTests.java index 0b0ed52d0ff67..a3b475193e46b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedConfigTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedConfigTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; import org.elasticsearch.test.AbstractXContentTestCase; @@ -79,7 +80,7 @@ public static DatafeedConfig.Builder createRandomBuilder() { aggHistogramInterval = aggHistogramInterval <= 0 ? 
1 : aggHistogramInterval; MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time"); aggs.addAggregator(AggregationBuilders.dateHistogram("buckets") - .interval(aggHistogramInterval).subAggregation(maxTime).field("time")); + .fixedInterval(new DateHistogramInterval(aggHistogramInterval + "ms")).subAggregation(maxTime).field("time")); try { builder.setAggregations(aggs); } catch (IOException e) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java index a063294cae6d7..67e118215d7ca 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java @@ -44,7 +44,7 @@ public void testFromXContent() throws IOException { this::createTestInstance, this::toXContent, GetRollupJobResponse::fromXContent) - .supportsUnknownFields(true) + .supportsUnknownFields(false) .randomFieldsExcludeFilter(field -> field.endsWith("status.current_position")) .test(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/PutRollupJobRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/PutRollupJobRequestTests.java index a49f85a1feda2..0056a7ad25cfb 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/PutRollupJobRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/PutRollupJobRequestTests.java @@ -49,7 +49,7 @@ protected PutRollupJobRequest doParseInstance(final XContentParser parser) throw @Override protected boolean supportsUnknownFields() { - return true; + return false; } public void testRequireConfiguration() { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/job/config/DateHistogramGroupConfigTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/job/config/DateHistogramGroupConfigTests.java index 2e6bb3f91547b..c11e6921ea912 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/job/config/DateHistogramGroupConfigTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/job/config/DateHistogramGroupConfigTests.java @@ -90,9 +90,21 @@ public void testValidate() { static DateHistogramGroupConfig randomDateHistogramGroupConfig() { final String field = randomAlphaOfLength(randomIntBetween(3, 10)); - final DateHistogramInterval interval = new DateHistogramInterval(randomPositiveTimeValue()); final DateHistogramInterval delay = randomBoolean() ? new DateHistogramInterval(randomPositiveTimeValue()) : null; final String timezone = randomBoolean() ? 
randomDateTimeZone().toString() : null; - return new DateHistogramGroupConfig(field, interval, delay, timezone); + int i = randomIntBetween(0,2); + final DateHistogramInterval interval; + switch (i) { + case 0: + interval = new DateHistogramInterval(randomPositiveTimeValue()); + return new DateHistogramGroupConfig.FixedInterval(field, interval, delay, timezone); + case 1: + interval = new DateHistogramInterval(randomTimeValue(1,1, "m", "h", "d", "w")); + return new DateHistogramGroupConfig.CalendarInterval(field, interval, delay, timezone); + default: + interval = new DateHistogramInterval(randomPositiveTimeValue()); + return new DateHistogramGroupConfig(field, interval, delay, timezone); + } + } } diff --git a/docs/build.gradle b/docs/build.gradle index 5816546d7e221..8156d1d54b57a 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -614,7 +614,7 @@ buildRestTests.setups['sensor_rollup_job'] = ''' "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h", + "fixed_interval": "1h", "delay": "7d" }, "terms": { @@ -683,7 +683,7 @@ buildRestTests.setups['sensor_started_rollup_job'] = ''' "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h", + "fixed_interval": "1h", "delay": "7d" }, "terms": { @@ -800,7 +800,7 @@ buildRestTests.setups['sensor_prefab_data'] = ''' date_histogram: delay: "7d" field: "timestamp" - interval: "60m" + fixed_interval: "60m" time_zone: "UTC" terms: fields: diff --git a/docs/java-api/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/java-api/aggregations/bucket/datehistogram-aggregation.asciidoc index 1fe945077fdb7..610262b046c21 100644 --- a/docs/java-api/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/java-api/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -16,7 +16,7 @@ AggregationBuilder aggregation = AggregationBuilders .dateHistogram("agg") .field("dateOfBirth") - .dateHistogramInterval(DateHistogramInterval.YEAR); + .calendarInterval(DateHistogramInterval.YEAR); -------------------------------------------------- Or if you want to set an interval of 10 days: @@ -27,7 +27,7 @@ AggregationBuilder aggregation = AggregationBuilders .dateHistogram("agg") .field("dateOfBirth") - .dateHistogramInterval(DateHistogramInterval.days(10)); + .fixedInterval(DateHistogramInterval.days(10)); -------------------------------------------------- diff --git a/docs/java-api/aggs.asciidoc b/docs/java-api/aggs.asciidoc index aa82d12137b45..c2e09b4901e87 100644 --- a/docs/java-api/aggs.asciidoc +++ b/docs/java-api/aggs.asciidoc @@ -47,7 +47,7 @@ SearchResponse sr = node.client().prepareSearch() AggregationBuilders.terms("by_country").field("country") .subAggregation(AggregationBuilders.dateHistogram("by_year") .field("dateOfBirth") - .dateHistogramInterval(DateHistogramInterval.YEAR) + .calendarInterval(DateHistogramInterval.YEAR) .subAggregation(AggregationBuilders.avg("avg_children").field("children")) ) ) diff --git a/docs/java-api/search.asciidoc b/docs/java-api/search.asciidoc index 47f53ba74f48d..ecf8415f4dcbe 100644 --- a/docs/java-api/search.asciidoc +++ b/docs/java-api/search.asciidoc @@ -109,7 +109,7 @@ SearchResponse sr = client.prepareSearch() .addAggregation( AggregationBuilders.dateHistogram("agg2") .field("birth") - .dateHistogramInterval(DateHistogramInterval.YEAR) + .calendarInterval(DateHistogramInterval.YEAR) ) .get(); diff --git a/docs/java-rest/high-level/rollup/put_job.asciidoc b/docs/java-rest/high-level/rollup/put_job.asciidoc index 0b7ece05ca89b..9a83f6022ecf2 100644 --- 
a/docs/java-rest/high-level/rollup/put_job.asciidoc +++ b/docs/java-rest/high-level/rollup/put_job.asciidoc @@ -54,7 +54,7 @@ Using the REST API, we could define this grouping configuration: "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h", + "calendar_interval": "1h", "delay": "7d", "time_zone": "UTC" }, diff --git a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc index a771bd4645200..45ab691604842 100644 --- a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc @@ -226,7 +226,7 @@ GET /_search "my_buckets": { "composite" : { "sources" : [ - { "date": { "date_histogram" : { "field": "timestamp", "interval": "1d" } } } + { "date": { "date_histogram" : { "field": "timestamp", "calendar_interval": "1d" } } } ] } } @@ -260,7 +260,7 @@ GET /_search "date": { "date_histogram" : { "field": "timestamp", - "interval": "1d", + "calendar_interval": "1d", "format": "yyyy-MM-dd" <1> } } @@ -299,7 +299,7 @@ GET /_search "my_buckets": { "composite" : { "sources" : [ - { "date": { "date_histogram": { "field": "timestamp", "interval": "1d" } } }, + { "date": { "date_histogram": { "field": "timestamp", "calendar_interval": "1d" } } }, { "product": { "terms": {"field": "product" } } } ] } @@ -324,7 +324,7 @@ GET /_search "sources" : [ { "shop": { "terms": {"field": "shop" } } }, { "product": { "terms": { "field": "product" } } }, - { "date": { "date_histogram": { "field": "timestamp", "interval": "1d" } } } + { "date": { "date_histogram": { "field": "timestamp", "calendar_interval": "1d" } } } ] } } @@ -352,7 +352,7 @@ GET /_search "my_buckets": { "composite" : { "sources" : [ - { "date": { "date_histogram": { "field": "timestamp", "interval": "1d", "order": "desc" } } }, + { "date": { "date_histogram": { "field": "timestamp", "calendar_interval": "1d", "order": "desc" } } }, { "product": { "terms": {"field": "product", "order": "asc" } } } ] } @@ -420,7 +420,7 @@ GET /_search "composite" : { "size": 2, "sources" : [ - { "date": { "date_histogram": { "field": "timestamp", "interval": "1d" } } }, + { "date": { "date_histogram": { "field": "timestamp", "calendar_interval": "1d" } } }, { "product": { "terms": {"field": "product" } } } ] } @@ -486,7 +486,7 @@ GET /_search "composite" : { "size": 2, "sources" : [ - { "date": { "date_histogram": { "field": "timestamp", "interval": "1d", "order": "desc" } } }, + { "date": { "date_histogram": { "field": "timestamp", "calendar_interval": "1d", "order": "desc" } } }, { "product": { "terms": {"field": "product", "order": "asc" } } } ], "after": { "date": 1494288000000, "product": "mad max" } <1> @@ -515,7 +515,7 @@ GET /_search "my_buckets": { "composite" : { "sources" : [ - { "date": { "date_histogram": { "field": "timestamp", "interval": "1d", "order": "desc" } } }, + { "date": { "date_histogram": { "field": "timestamp", "calendar_interval": "1d", "order": "desc" } } }, { "product": { "terms": {"field": "product" } } } ] }, diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc index 07a6fd257ef33..2ee40b24a8548 100644 --- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -135,7 +135,7 @@ POST /sales/_search?size=0 "sales_over_time" : { "date_histogram" : { "field" : "date", 
- "interval" : "month" + "calendar_interval" : "month" } } } @@ -158,7 +158,7 @@ POST /sales/_search?size=0 "sales_over_time" : { "date_histogram" : { "field" : "date", - "interval" : "90m" + "fixed_interval" : "90m" } } } @@ -186,7 +186,7 @@ POST /sales/_search?size=0 "sales_over_time" : { "date_histogram" : { "field" : "date", - "interval" : "1M", + "calendar_interval" : "1M", "format" : "yyyy-MM-dd" <1> } } @@ -259,7 +259,7 @@ GET my_index/_search?size=0 "by_day": { "date_histogram": { "field": "date", - "interval": "day" + "calendar_interval": "day" } } } @@ -301,7 +301,7 @@ GET my_index/_search?size=0 "by_day": { "date_histogram": { "field": "date", - "interval": "day", + "calendar_interval": "day", "time_zone": "-01:00" } } @@ -380,7 +380,7 @@ GET my_index/_search?size=0 "by_day": { "date_histogram": { "field": "date", - "interval": "day", + "calendar_interval": "day", "offset": "+6h" } } @@ -432,7 +432,7 @@ POST /sales/_search?size=0 "sales_over_time" : { "date_histogram" : { "field" : "date", - "interval" : "1M", + "calendar_interval" : "1M", "format" : "yyyy-MM-dd", "keyed": true } @@ -502,7 +502,7 @@ POST /sales/_search?size=0 "sale_date" : { "date_histogram" : { "field" : "date", - "interval": "year", + "calendar_interval": "year", "missing": "2000/01/01" <1> } } diff --git a/docs/reference/aggregations/misc.asciidoc b/docs/reference/aggregations/misc.asciidoc index 288643dbf9313..678ebc0a8a4c6 100644 --- a/docs/reference/aggregations/misc.asciidoc +++ b/docs/reference/aggregations/misc.asciidoc @@ -102,7 +102,7 @@ GET /twitter/_search?typed_keys "tweets_over_time": { "date_histogram": { "field": "date", - "interval": "year" + "calendar_interval": "year" }, "aggregations": { "top_users": { diff --git a/docs/reference/aggregations/pipeline.asciidoc b/docs/reference/aggregations/pipeline.asciidoc index 37c1c357007b0..81d711cc29ce7 100644 --- a/docs/reference/aggregations/pipeline.asciidoc +++ b/docs/reference/aggregations/pipeline.asciidoc @@ -57,7 +57,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"timestamp", - "interval":"day" + "calendar_interval":"day" }, "aggs":{ "the_sum":{ @@ -88,7 +88,7 @@ POST /_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { "sales": { @@ -125,7 +125,7 @@ POST /_search "my_date_histo": { "date_histogram": { "field":"timestamp", - "interval":"day" + "calendar_interval":"day" }, "aggs": { "the_movavg": { @@ -153,7 +153,7 @@ POST /sales/_search "histo": { "date_histogram": { "field": "date", - "interval": "day" + "calendar_interval": "day" }, "aggs": { "categories": { diff --git a/docs/reference/aggregations/pipeline/avg-bucket-aggregation.asciidoc b/docs/reference/aggregations/pipeline/avg-bucket-aggregation.asciidoc index ea150b4ab6d8b..33ccf505e5b8b 100644 --- a/docs/reference/aggregations/pipeline/avg-bucket-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/avg-bucket-aggregation.asciidoc @@ -42,7 +42,7 @@ POST /_search "sales_per_month": { "date_histogram": { "field": "date", - "interval": "month" + "calendar_interval": "month" }, "aggs": { "sales": { diff --git a/docs/reference/aggregations/pipeline/bucket-script-aggregation.asciidoc b/docs/reference/aggregations/pipeline/bucket-script-aggregation.asciidoc index 7dc99ba7719cd..6ecd1248a4e31 100644 --- a/docs/reference/aggregations/pipeline/bucket-script-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/bucket-script-aggregation.asciidoc @@ -50,7 +50,7 @@ POST 
/sales/_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { "total_sales": { diff --git a/docs/reference/aggregations/pipeline/bucket-selector-aggregation.asciidoc b/docs/reference/aggregations/pipeline/bucket-selector-aggregation.asciidoc index 7ec19174a06e3..41ce04803fbe1 100644 --- a/docs/reference/aggregations/pipeline/bucket-selector-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/bucket-selector-aggregation.asciidoc @@ -53,7 +53,7 @@ POST /sales/_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { "total_sales": { diff --git a/docs/reference/aggregations/pipeline/bucket-sort-aggregation.asciidoc b/docs/reference/aggregations/pipeline/bucket-sort-aggregation.asciidoc index a136a6ee4d578..d219e005d75d0 100644 --- a/docs/reference/aggregations/pipeline/bucket-sort-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/bucket-sort-aggregation.asciidoc @@ -56,7 +56,7 @@ POST /sales/_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { "total_sales": { @@ -144,7 +144,7 @@ POST /sales/_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { "bucket_truncate": { diff --git a/docs/reference/aggregations/pipeline/cumulative-sum-aggregation.asciidoc b/docs/reference/aggregations/pipeline/cumulative-sum-aggregation.asciidoc index a6dff7fa84651..226fd9c1bd106 100644 --- a/docs/reference/aggregations/pipeline/cumulative-sum-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/cumulative-sum-aggregation.asciidoc @@ -40,7 +40,7 @@ POST /sales/_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { "sales": { diff --git a/docs/reference/aggregations/pipeline/derivative-aggregation.asciidoc b/docs/reference/aggregations/pipeline/derivative-aggregation.asciidoc index f40ace7432d57..d987294f96566 100644 --- a/docs/reference/aggregations/pipeline/derivative-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/derivative-aggregation.asciidoc @@ -43,7 +43,7 @@ POST /sales/_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { "sales": { @@ -137,7 +137,7 @@ POST /sales/_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { "sales": { @@ -237,7 +237,7 @@ POST /sales/_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { "sales": { diff --git a/docs/reference/aggregations/pipeline/extended-stats-bucket-aggregation.asciidoc b/docs/reference/aggregations/pipeline/extended-stats-bucket-aggregation.asciidoc index c35223885fce0..8f9522ec3e5db 100644 --- a/docs/reference/aggregations/pipeline/extended-stats-bucket-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/extended-stats-bucket-aggregation.asciidoc @@ -44,7 +44,7 @@ POST /sales/_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { "sales": { diff --git a/docs/reference/aggregations/pipeline/max-bucket-aggregation.asciidoc 
b/docs/reference/aggregations/pipeline/max-bucket-aggregation.asciidoc index 53a3aaa28f7e5..58bdab6128bfa 100644 --- a/docs/reference/aggregations/pipeline/max-bucket-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/max-bucket-aggregation.asciidoc @@ -42,7 +42,7 @@ POST /sales/_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { "sales": { diff --git a/docs/reference/aggregations/pipeline/min-bucket-aggregation.asciidoc b/docs/reference/aggregations/pipeline/min-bucket-aggregation.asciidoc index 620cf02c714fa..8f075f7e071d0 100644 --- a/docs/reference/aggregations/pipeline/min-bucket-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/min-bucket-aggregation.asciidoc @@ -42,7 +42,7 @@ POST /sales/_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { "sales": { diff --git a/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc b/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc index f6fb25c76f662..ea414237174e6 100644 --- a/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc @@ -46,7 +46,7 @@ POST /_search "my_date_histo":{ <1> "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -148,7 +148,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -207,7 +207,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -250,7 +250,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -293,7 +293,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -338,7 +338,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -390,7 +390,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -436,7 +436,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -488,7 +488,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -546,7 +546,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -612,7 +612,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ diff --git a/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc b/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc index 456a4046c0624..6620e5689cb60 100644 --- a/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc @@ -43,7 +43,7 @@ POST /sales/_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { 
"sales": { diff --git a/docs/reference/aggregations/pipeline/serial-diff-aggregation.asciidoc b/docs/reference/aggregations/pipeline/serial-diff-aggregation.asciidoc index 1506e39685845..7a414b95d4025 100644 --- a/docs/reference/aggregations/pipeline/serial-diff-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/serial-diff-aggregation.asciidoc @@ -69,7 +69,7 @@ POST /_search "my_date_histo": { <1> "date_histogram": { "field": "timestamp", - "interval": "day" + "calendar_interval": "day" }, "aggs": { "the_sum": { diff --git a/docs/reference/aggregations/pipeline/stats-bucket-aggregation.asciidoc b/docs/reference/aggregations/pipeline/stats-bucket-aggregation.asciidoc index 2c9f585ebea3a..8c6359fb7765e 100644 --- a/docs/reference/aggregations/pipeline/stats-bucket-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/stats-bucket-aggregation.asciidoc @@ -41,7 +41,7 @@ POST /sales/_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { "sales": { diff --git a/docs/reference/aggregations/pipeline/sum-bucket-aggregation.asciidoc b/docs/reference/aggregations/pipeline/sum-bucket-aggregation.asciidoc index 83e0e32135040..f0a19f3254126 100644 --- a/docs/reference/aggregations/pipeline/sum-bucket-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/sum-bucket-aggregation.asciidoc @@ -41,7 +41,7 @@ POST /sales/_search "sales_per_month" : { "date_histogram" : { "field" : "date", - "interval" : "month" + "calendar_interval" : "month" }, "aggs": { "sales": { diff --git a/docs/reference/ml/aggregations.asciidoc b/docs/reference/ml/aggregations.asciidoc index a50016807a714..1fad9f1b2bb29 100644 --- a/docs/reference/ml/aggregations.asciidoc +++ b/docs/reference/ml/aggregations.asciidoc @@ -63,7 +63,7 @@ PUT _ml/datafeeds/datafeed-farequote "buckets": { "date_histogram": { "field": "time", - "interval": "360s", + "fixed_interval": "360s", "time_zone": "UTC" }, "aggregations": { @@ -119,7 +119,7 @@ pipeline aggregation to find the first order derivative of the counter "buckets": { "date_histogram": { "field": "@timestamp", - "interval": "5m" + "fixed_interval": "5m" }, "aggregations": { "@timestamp": { diff --git a/docs/reference/rollup/apis/get-job.asciidoc b/docs/reference/rollup/apis/get-job.asciidoc index ff4d62fb8002c..4e39778eebdd0 100644 --- a/docs/reference/rollup/apis/get-job.asciidoc +++ b/docs/reference/rollup/apis/get-job.asciidoc @@ -63,7 +63,7 @@ Which will yield the following response: "cron" : "*/30 * * * * ?", "groups" : { "date_histogram" : { - "interval" : "1h", + "fixed_interval" : "1h", "delay": "7d", "field": "timestamp", "time_zone": "UTC" @@ -149,7 +149,7 @@ PUT _rollup/job/sensor2 <1> "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h", + "fixed_interval": "1h", "delay": "7d" }, "terms": { @@ -189,7 +189,7 @@ Which will yield the following response: "cron" : "*/30 * * * * ?", "groups" : { "date_histogram" : { - "interval" : "1h", + "fixed_interval" : "1h", "delay": "7d", "field": "timestamp", "time_zone": "UTC" @@ -244,7 +244,7 @@ Which will yield the following response: "cron" : "*/30 * * * * ?", "groups" : { "date_histogram" : { - "interval" : "1h", + "fixed_interval" : "1h", "delay": "7d", "field": "timestamp", "time_zone": "UTC" diff --git a/docs/reference/rollup/apis/put-job.asciidoc b/docs/reference/rollup/apis/put-job.asciidoc index b43c5a0e90b2a..eac71a48b4336 100644 --- a/docs/reference/rollup/apis/put-job.asciidoc +++ 
b/docs/reference/rollup/apis/put-job.asciidoc @@ -68,7 +68,7 @@ PUT _rollup/job/sensor "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h", + "fixed_interval": "1h", "delay": "7d" }, "terms": { diff --git a/docs/reference/rollup/apis/rollup-caps.asciidoc b/docs/reference/rollup/apis/rollup-caps.asciidoc index a0de0f99f9872..e50806f3c1e0e 100644 --- a/docs/reference/rollup/apis/rollup-caps.asciidoc +++ b/docs/reference/rollup/apis/rollup-caps.asciidoc @@ -62,7 +62,7 @@ PUT _rollup/job/sensor "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h", + "fixed_interval": "1h", "delay": "7d" }, "terms": { @@ -125,7 +125,7 @@ Which will yield the following response: { "agg" : "date_histogram", "time_zone" : "UTC", - "interval" : "1h", + "fixed_interval" : "1h", "delay": "7d" } ], diff --git a/docs/reference/rollup/apis/rollup-index-caps.asciidoc b/docs/reference/rollup/apis/rollup-index-caps.asciidoc index 1fad99e0311de..a0697ba70326e 100644 --- a/docs/reference/rollup/apis/rollup-index-caps.asciidoc +++ b/docs/reference/rollup/apis/rollup-index-caps.asciidoc @@ -53,7 +53,7 @@ PUT _rollup/job/sensor "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h", + "fixed_interval": "1h", "delay": "7d" }, "terms": { @@ -118,7 +118,7 @@ This will yield the following response: { "agg" : "date_histogram", "time_zone" : "UTC", - "interval" : "1h", + "fixed_interval" : "1h", "delay": "7d" } ], diff --git a/docs/reference/rollup/apis/rollup-job-config.asciidoc b/docs/reference/rollup/apis/rollup-job-config.asciidoc index 852f7b879fb38..8277834d5e449 100644 --- a/docs/reference/rollup/apis/rollup-job-config.asciidoc +++ b/docs/reference/rollup/apis/rollup-job-config.asciidoc @@ -24,7 +24,7 @@ PUT _rollup/job/sensor "groups" : { "date_histogram": { "field": "timestamp", - "interval": "60m", + "fixed_interval": "60m", "delay": "7d" }, "terms": { @@ -100,7 +100,7 @@ fields will then be available later for aggregating into buckets. 
For example, "groups" : { "date_histogram": { "field": "timestamp", - "interval": "60m", + "fixed_interval": "60m", "delay": "7d" }, "terms": { diff --git a/docs/reference/rollup/apis/rollup-search.asciidoc b/docs/reference/rollup/apis/rollup-search.asciidoc index 244f304ed917b..ec2a554d09ff4 100644 --- a/docs/reference/rollup/apis/rollup-search.asciidoc +++ b/docs/reference/rollup/apis/rollup-search.asciidoc @@ -62,7 +62,7 @@ PUT _rollup/job/sensor "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h", + "fixed_interval": "1h", "delay": "7d" }, "terms": { diff --git a/docs/reference/rollup/rollup-getting-started.asciidoc b/docs/reference/rollup/rollup-getting-started.asciidoc index 71a8ed73cc851..3e2d694464ee2 100644 --- a/docs/reference/rollup/rollup-getting-started.asciidoc +++ b/docs/reference/rollup/rollup-getting-started.asciidoc @@ -39,7 +39,7 @@ PUT _rollup/job/sensor "groups" : { "date_histogram": { "field": "timestamp", - "interval": "60m" + "fixed_interval": "60m" }, "terms": { "fields": ["node"] @@ -194,7 +194,7 @@ GET /sensor_rollup/_rollup_search "timeline": { "date_histogram": { "field": "timestamp", - "interval": "7d" + "fixed_interval": "7d" }, "aggs": { "nodes": { diff --git a/docs/reference/rollup/understanding-groups.asciidoc b/docs/reference/rollup/understanding-groups.asciidoc index 4733467ec3364..a59c19fbf5cc6 100644 --- a/docs/reference/rollup/understanding-groups.asciidoc +++ b/docs/reference/rollup/understanding-groups.asciidoc @@ -22,7 +22,7 @@ based on which groups are potentially useful to future queries. For example, th "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h", + "fixed_interval": "1h", "delay": "7d" }, "terms": { @@ -47,7 +47,7 @@ Importantly, these aggs/fields can be used in any combination. 
This aggregation "hourly": { "date_histogram": { "field": "timestamp", - "interval": "1h" + "fixed_interval": "1h" }, "aggs": { "host_names": { @@ -69,7 +69,7 @@ is just as valid as this aggregation: "hourly": { "date_histogram": { "field": "timestamp", - "interval": "1h" + "fixed_interval": "1h" }, "aggs": { "data_center": { @@ -171,7 +171,7 @@ PUT _rollup/job/combined "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h", + "fixed_interval": "1h", "delay": "7d" }, "terms": { diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml index c2fb38611a30d..cca143f0bcc09 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml @@ -66,7 +66,7 @@ setup: the_histo: date_histogram: field: "date" - interval: "1d" + calendar_interval: "1d" aggs: the_avg: avg: @@ -98,7 +98,7 @@ setup: the_histo: date_histogram: field: "date" - interval: "1d" + calendar_interval: "1d" aggs: the_avg: avg: @@ -130,7 +130,7 @@ setup: the_histo: date_histogram: field: "date" - interval: "1d" + calendar_interval: "1d" aggs: the_avg: avg: @@ -162,7 +162,7 @@ setup: the_histo: date_histogram: field: "date" - interval: "1d" + calendar_interval: "1d" aggs: the_avg: avg: @@ -189,7 +189,7 @@ setup: the_histo: date_histogram: field: "date" - interval: "1d" + calendar_interval: "1d" aggs: the_avg: avg: @@ -216,7 +216,7 @@ setup: the_histo: date_histogram: field: "date" - interval: "1d" + calendar_interval: "1d" aggs: the_avg: avg: @@ -243,7 +243,7 @@ setup: the_histo: date_histogram: field: "date" - interval: "1d" + calendar_interval: "1d" aggs: the_avg: avg: @@ -270,7 +270,7 @@ setup: the_histo: date_histogram: field: "date" - interval: "1d" + calendar_interval: "1d" aggs: the_avg: avg: @@ -296,7 +296,7 @@ setup: the_histo: date_histogram: field: "date" - interval: "1d" + calendar_interval: "1d" aggs: the_avg: avg: diff --git a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java index 4a18ddbe1b696..558e6071255b1 100644 --- a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java +++ b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java @@ -564,7 +564,7 @@ public void testDateHistogram() throws Exception { tags.showTermDocCountError(true); DateHistogramAggregationBuilder creation = new DateHistogramAggregationBuilder("creation"); creation.field("creationDate"); - creation.dateHistogramInterval(DateHistogramInterval.QUARTER); + creation.calendarInterval(DateHistogramInterval.QUARTER); creation.subAggregation(tags); sourceBuilder.aggregation(creation); duelSearch(searchRequest, CCSDuelIT::assertAggs); @@ -591,7 +591,7 @@ public void testPipelineAggs() throws Exception { sourceBuilder.size(0); DateHistogramAggregationBuilder daily = new DateHistogramAggregationBuilder("daily"); daily.field("creationDate"); - daily.dateHistogramInterval(DateHistogramInterval.DAY); + daily.calendarInterval(DateHistogramInterval.DAY); sourceBuilder.aggregation(daily); daily.subAggregation(new DerivativePipelineAggregationBuilder("derivative", "_count")); sourceBuilder.aggregation(new MaxBucketPipelineAggregationBuilder("biggest_day", "daily._count")); diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml index 535e5565008c4..c643be01613b6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml @@ -264,10 +264,13 @@ setup: --- "Composite aggregation with format": - skip: - version: " - 6.2.99" - reason: this uses a new option (format) added in 6.3.0 + version: " - 7.1.99" #TODO change this after backport + reason: calendar_interval introduced in 7.2.0 + features: warnings - do: + warnings: + - '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.' search: rest_total_hits_as_int: true index: test @@ -295,6 +298,8 @@ setup: - match: { aggregations.test.buckets.1.doc_count: 1 } - do: + warnings: + - '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.' search: rest_total_hits_as_int: true index: test @@ -322,6 +327,67 @@ setup: - match: { aggregations.test.buckets.0.key.date: "2017-10-21" } - match: { aggregations.test.buckets.0.doc_count: 1 } +--- +"Composite aggregation with format and calendar_interval": + - skip: + version: " - 7.1.99" #TODO change this after backport + reason: calendar_interval introduced in 7.2.0 + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + aggregations: + test: + composite: + sources: [ + { + "date": { + "date_histogram": { + "field": "date", + "calendar_interval": "1d", + "format": "yyyy-MM-dd" + } + } + } + ] + + - match: {hits.total: 6} + - length: { aggregations.test.buckets: 2 } + - match: { aggregations.test.buckets.0.key.date: "2017-10-20" } + - match: { aggregations.test.buckets.0.doc_count: 1 } + - match: { aggregations.test.buckets.1.key.date: "2017-10-21" } + - match: { aggregations.test.buckets.1.doc_count: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + aggregations: + test: + composite: + after: { + date: "2017-10-20" + } + sources: [ + { + "date": { + "date_histogram": { + "field": "date", + "calendar_interval": "1d", + "format": "yyyy-MM-dd" + } + } + } + ] + + - match: {hits.total: 6} + - length: { aggregations.test.buckets: 1 } + - match: { aggregations.test.buckets.0.key.date: "2017-10-21" } + - match: { aggregations.test.buckets.0.doc_count: 1 } + --- "Composite aggregation with after_key in the response": - skip: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml index 3dd8d345043c3..981bafb6538b3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml @@ -85,18 +85,6 @@ setup: transient: search.max_buckets: 3 - - do: - catch: /.*Trying to create too many buckets.*/ - search: - rest_total_hits_as_int: true - index: test - body: - aggregations: - test: - date_histogram: - field: date - interval: 1d - - do: catch: /.*Trying to create too many buckets.*/ search: @@ -107,17 +95,12 @@ setup: test: terms: field: keyword - aggs: - 2: - date_histogram: - field: date - interval: 1d - do: cluster.put_settings: body: transient: - search.max_buckets: 100 + search.max_buckets: 6 - do: catch: 
/.*Trying to create too many buckets.*/ @@ -127,7 +110,10 @@ setup: body: aggregations: test: - date_histogram: - field: date - interval: 1d - min_doc_count: 0 + terms: + field: keyword + aggs: + 2: + terms: + field: date + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml index 0a7affd276aea..433407f90575a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml @@ -6,6 +6,10 @@ setup: --- "Bad window": + - skip: + version: " - 7.99.0" #TODO change this after backport + reason: "calendar_interval added in 7.2" + - do: catch: /\[window\] must be a positive, non-zero integer\./ search: @@ -16,7 +20,7 @@ setup: the_histo: date_histogram: field: "date" - interval: "1d" + calendar_interval: "1d" aggs: the_avg: avg: @@ -28,6 +32,36 @@ setup: script: "MovingFunctions.windowMax(values)" --- +"Bad window deprecated interval": + + - skip: + version: " - 7.99.0" #TODO change this after backport + reason: "interval deprecation added in 7.2" + features: "warnings" + + - do: + catch: /\[window\] must be a positive, non-zero integer\./ + warnings: + - "[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future." + search: + rest_total_hits_as_int: true + body: + size: 0 + aggs: + the_histo: + date_histogram: + field: "date" + interval: "1d" + aggs: + the_avg: + avg: + field: "value_field" + the_mov_fn: + moving_fn: + buckets_path: "the_avg" + window: -1 + script: "MovingFunctions.windowMax(values)" +--- "Not under date_histo": - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml index 7897d1feb5aa6..01309274f05b4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml @@ -206,9 +206,8 @@ setup: --- "Test typed keys parameter for date_histogram aggregation and max_bucket pipeline aggregation": - skip: - features: warnings - version: " - 6.3.99" - reason: "deprecation added in 6.4.0" + version: " - 7.1.99" #TODO change this after backport + reason: "calendar_interval added in 7.2" - do: warnings: - 'The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.' 
@@ -221,13 +220,13 @@ setup: test_created_histogram: date_histogram: field: created - interval: month + calendar_interval: month aggregations: test_sum: sum: field: num - test_moving_avg: - moving_avg: + test_deriv: + derivative: buckets_path: "test_sum" test_max_bucket: max_bucket: @@ -236,5 +235,5 @@ setup: - is_true: aggregations.date_histogram#test_created_histogram - is_true: aggregations.date_histogram#test_created_histogram.buckets.0.sum#test_sum - is_true: aggregations.date_histogram#test_created_histogram.buckets.1.sum#test_sum - - is_true: aggregations.date_histogram#test_created_histogram.buckets.1.simple_value#test_moving_avg + - is_true: aggregations.date_histogram#test_created_histogram.buckets.1.derivative#test_deriv - is_true: aggregations.bucket_metric_value#test_max_bucket diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml index f5345ba57b1b7..a775e51a712c2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml @@ -124,6 +124,9 @@ setup: --- "date histogram aggregation with date and date_nanos mapping": + - skip: + version: " - 7.99.99" #TODO change this after backport + reason: calendar_interval introduced in 7.2.0 - do: bulk: @@ -148,7 +151,7 @@ setup: date: date_histogram: field: date - interval: 1d + calendar_interval: 1d - match: { hits.total: 4 } - length: { aggregations.date.buckets: 2 } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java index 53a7832884c76..bb7632278de91 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java @@ -19,12 +19,10 @@ package org.elasticsearch.search.aggregations.bucket.composite; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -33,7 +31,8 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; +import org.elasticsearch.search.aggregations.bucket.histogram.DateIntervalConsumer; +import org.elasticsearch.search.aggregations.bucket.histogram.DateIntervalWrapper; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; @@ -44,32 +43,19 @@ import java.time.ZoneOffset; import java.util.Objects; -import static org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder.DATE_FIELD_UNITS; - /** * A {@link CompositeValuesSourceBuilder} that builds a {@link RoundingValuesSource} from a 
{@link Script} or * a field name using the provided interval. */ -public class DateHistogramValuesSourceBuilder extends CompositeValuesSourceBuilder { +public class DateHistogramValuesSourceBuilder + extends CompositeValuesSourceBuilder implements DateIntervalConsumer { static final String TYPE = "date_histogram"; private static final ObjectParser PARSER; static { PARSER = new ObjectParser<>(DateHistogramValuesSourceBuilder.TYPE); PARSER.declareString(DateHistogramValuesSourceBuilder::format, new ParseField("format")); - PARSER.declareField((histogram, interval) -> { - if (interval instanceof Long) { - histogram.interval((long) interval); - } else { - histogram.dateHistogramInterval((DateHistogramInterval) interval); - } - }, p -> { - if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return p.longValue(); - } else { - return new DateHistogramInterval(p.text()); - } - }, Histogram.INTERVAL_FIELD, ObjectParser.ValueType.LONG); + DateIntervalWrapper.declareIntervalFields(PARSER); PARSER.declareField(DateHistogramValuesSourceBuilder::timeZone, p -> { if (p.currentToken() == XContentParser.Token.VALUE_STRING) { return ZoneId.of(p.text()); @@ -83,9 +69,8 @@ static DateHistogramValuesSourceBuilder parse(String name, XContentParser parser return PARSER.parse(parser, new DateHistogramValuesSourceBuilder(name), null); } - private long interval = 0; private ZoneId timeZone = null; - private DateHistogramInterval dateHistogramInterval; + private DateIntervalWrapper dateHistogramInterval = new DateIntervalWrapper(); public DateHistogramValuesSourceBuilder(String name) { super(name, ValueType.DATE); @@ -93,33 +78,19 @@ public DateHistogramValuesSourceBuilder(String name) { protected DateHistogramValuesSourceBuilder(StreamInput in) throws IOException { super(in); - this.interval = in.readLong(); - this.dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new); - if (in.getVersion().before(Version.V_7_0_0)) { - this.timeZone = DateUtils.dateTimeZoneToZoneId(in.readOptionalTimeZone()); - } else { - this.timeZone = in.readOptionalZoneId(); - } + dateHistogramInterval = new DateIntervalWrapper(in); + timeZone = in.readOptionalZoneId(); } @Override protected void innerWriteTo(StreamOutput out) throws IOException { - out.writeLong(interval); - out.writeOptionalWriteable(dateHistogramInterval); - if (out.getVersion().before(Version.V_7_0_0)) { - out.writeOptionalTimeZone(DateUtils.zoneIdToDateTimeZone(timeZone)); - } else { - out.writeOptionalZoneId(timeZone); - } + dateHistogramInterval.writeTo(out); + out.writeOptionalZoneId(timeZone); } @Override protected void doXContentBody(XContentBuilder builder, Params params) throws IOException { - if (dateHistogramInterval == null) { - builder.field(Histogram.INTERVAL_FIELD.getPreferredName(), interval); - } else { - builder.field(Histogram.INTERVAL_FIELD.getPreferredName(), dateHistogramInterval.toString()); - } + dateHistogramInterval.toXContent(builder, params); if (timeZone != null) { builder.field("time_zone", timeZone.toString()); } @@ -127,13 +98,12 @@ protected void doXContentBody(XContentBuilder builder, Params params) throws IOE @Override protected int innerHashCode() { - return Objects.hash(interval, dateHistogramInterval, timeZone); + return Objects.hash(dateHistogramInterval, timeZone); } @Override protected boolean innerEquals(DateHistogramValuesSourceBuilder other) { - return Objects.equals(interval, other.interval) - && Objects.equals(dateHistogramInterval, other.dateHistogramInterval) + return 
Objects.equals(dateHistogramInterval, other.dateHistogramInterval) && Objects.equals(timeZone, other.timeZone); } @@ -145,38 +115,84 @@ public String type() { /** * Returns the interval in milliseconds that is set on this source **/ + @Deprecated public long interval() { - return interval; + return dateHistogramInterval.interval(); } /** * Sets the interval on this source. * If both {@link #interval()} and {@link #dateHistogramInterval()} are set, * then the {@link #dateHistogramInterval()} wins. + * + * @deprecated Use {@link #calendarInterval(DateHistogramInterval)} or {@link #fixedInterval(DateHistogramInterval)} instead + * @since 7.2.0 **/ + @Deprecated public DateHistogramValuesSourceBuilder interval(long interval) { - if (interval < 1) { - throw new IllegalArgumentException("[interval] must be 1 or greater for [date_histogram] source"); - } - this.interval = interval; + dateHistogramInterval.interval(interval); return this; } /** * Returns the date interval that is set on this source **/ + @Deprecated public DateHistogramInterval dateHistogramInterval() { - return dateHistogramInterval; + return dateHistogramInterval.dateHistogramInterval(); } - public DateHistogramValuesSourceBuilder dateHistogramInterval(DateHistogramInterval dateHistogramInterval) { - if (dateHistogramInterval == null) { - throw new IllegalArgumentException("[dateHistogramInterval] must not be null"); - } - this.dateHistogramInterval = dateHistogramInterval; + /** + * @deprecated Use {@link #calendarInterval(DateHistogramInterval)} or {@link #fixedInterval(DateHistogramInterval)} instead + * @since 7.2.0 + */ + @Deprecated + public DateHistogramValuesSourceBuilder dateHistogramInterval(DateHistogramInterval interval) { + dateHistogramInterval.dateHistogramInterval(interval); + return this; + } + + /** + * Sets the interval of the DateHistogram using calendar units (`1d`, `1w`, `1M`, etc). These units + * are calendar-aware, meaning they respect leap additions, variable days per month, etc. + * + * This is mutually exclusive with {@link DateHistogramValuesSourceBuilder#fixedInterval(DateHistogramInterval)} + * + * @param interval The calendar interval to use with the aggregation + */ + public DateHistogramValuesSourceBuilder calendarInterval(DateHistogramInterval interval) { + dateHistogramInterval.calendarInterval(interval); + return this; + } + + /** + * Sets the interval of the DateHistogram using fixed units (`1ms`, `1s`, `10m`, `4h`, etc). These are + * not calendar aware and are simply multiples of fixed, SI units. + * + * This is mutually exclusive with {@link DateHistogramValuesSourceBuilder#calendarInterval(DateHistogramInterval)} + * + * @param interval The fixed interval to use with the aggregation + */ + public DateHistogramValuesSourceBuilder fixedInterval(DateHistogramInterval interval) { + dateHistogramInterval.fixedInterval(interval); return this; } + /** Return the interval as a calendar interval, regardless of how it was configured. Throws an + * {@link IllegalStateException} if the interval cannot be interpreted as a calendar interval; in that + * case it is a fixed interval and may be accessed via {@link #getIntervalAsFixed()}. */ + public DateHistogramInterval getIntervalAsCalendar() { + return dateHistogramInterval.getAsCalendarInterval(); + } + + /** + * Get the interval as a fixed interval, regardless of how it was configured. Throws an {@link IllegalStateException} if + * the interval cannot be parsed as a fixed time. 
+ */ + public DateHistogramInterval getIntervalAsFixed() { + return dateHistogramInterval.getAsFixedInterval(); + } + /** * Sets the time zone to use for this aggregation */ @@ -195,31 +211,9 @@ public ZoneId timeZone() { return timeZone; } - private Rounding createRounding() { - Rounding.Builder tzRoundingBuilder; - if (dateHistogramInterval != null) { - Rounding.DateTimeUnit dateTimeUnit = DATE_FIELD_UNITS.get(dateHistogramInterval.toString()); - if (dateTimeUnit != null) { - tzRoundingBuilder = Rounding.builder(dateTimeUnit); - } else { - // the interval is a time value? - tzRoundingBuilder = Rounding.builder( - TimeValue.parseTimeValue(dateHistogramInterval.toString(), null, getClass().getSimpleName() + ".interval")); - } - } else { - // the interval is an integer time value in millis? - tzRoundingBuilder = Rounding.builder(TimeValue.timeValueMillis(interval)); - } - if (timeZone() != null) { - tzRoundingBuilder.timeZone(timeZone()); - } - Rounding rounding = tzRoundingBuilder.build(); - return rounding; - } - @Override protected CompositeValuesSourceConfig innerBuild(SearchContext context, ValuesSourceConfig config) throws IOException { - Rounding rounding = createRounding(); + Rounding rounding = dateHistogramInterval.createRounding(timeZone()); ValuesSource orig = config.toValuesSource(context.getQueryShardContext()); if (orig == null) { orig = ValuesSource.Numeric.EMPTY; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java index 6d7852a864453..52aebd43c5de7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java @@ -70,7 +70,7 @@ * A builder for histograms on date fields. 
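 * <p>
 * A minimal usage sketch with the new explicit interval setters (illustrative names only, the setters themselves are the ones added in this patch):
 * <pre>
 *     new DateHistogramAggregationBuilder("by_day")
 *         .field("timestamp")
 *         .calendarInterval(DateHistogramInterval.DAY);
 * </pre>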
*/ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuilder - implements MultiBucketAggregationBuilder { + implements MultiBucketAggregationBuilder, DateIntervalConsumer { public static final String NAME = "date_histogram"; private static DateMathParser EPOCH_MILLIS_PARSER = DateFormatter.forPattern("epoch_millis").toDateMathParser(); @@ -103,19 +103,7 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil PARSER = new ObjectParser<>(DateHistogramAggregationBuilder.NAME); ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, true); - PARSER.declareField((histogram, interval) -> { - if (interval instanceof Long) { - histogram.interval((long) interval); - } else { - histogram.dateHistogramInterval((DateHistogramInterval) interval); - } - }, p -> { - if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return p.longValue(); - } else { - return new DateHistogramInterval(p.text()); - } - }, Histogram.INTERVAL_FIELD, ObjectParser.ValueType.LONG); + DateIntervalWrapper.declareIntervalFields(PARSER); PARSER.declareField(DateHistogramAggregationBuilder::offset, p -> { if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { @@ -140,8 +128,7 @@ public static DateHistogramAggregationBuilder parse(String aggregationName, XCon return PARSER.parse(parser, new DateHistogramAggregationBuilder(aggregationName), null); } - private long interval; - private DateHistogramInterval dateHistogramInterval; + private DateIntervalWrapper dateHistogramInterval = new DateIntervalWrapper(); private long offset = 0; private ExtendedBounds extendedBounds; private BucketOrder order = BucketOrder.key(true); @@ -156,7 +143,6 @@ public DateHistogramAggregationBuilder(String name) { protected DateHistogramAggregationBuilder(DateHistogramAggregationBuilder clone, Builder factoriesBuilder, Map metaData) { super(clone, factoriesBuilder, metaData); - this.interval = clone.interval; this.dateHistogramInterval = clone.dateHistogramInterval; this.offset = clone.offset; this.extendedBounds = clone.extendedBounds; @@ -176,8 +162,7 @@ public DateHistogramAggregationBuilder(StreamInput in) throws IOException { order = InternalOrder.Streams.readHistogramOrder(in, true); keyed = in.readBoolean(); minDocCount = in.readVLong(); - interval = in.readLong(); - dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new); + dateHistogramInterval = new DateIntervalWrapper(in); offset = in.readLong(); extendedBounds = in.readOptionalWriteable(ExtendedBounds::new); } @@ -187,44 +172,97 @@ protected void innerWriteTo(StreamOutput out) throws IOException { InternalOrder.Streams.writeHistogramOrder(order, out, true); out.writeBoolean(keyed); out.writeVLong(minDocCount); - out.writeLong(interval); - out.writeOptionalWriteable(dateHistogramInterval); + dateHistogramInterval.writeTo(out); out.writeLong(offset); out.writeOptionalWriteable(extendedBounds); } /** Get the current interval in milliseconds that is set on this builder. */ + @Deprecated public long interval() { - return interval; + return dateHistogramInterval.interval(); } /** Set the interval on this builder, and return the builder so that calls can be chained. * If both {@link #interval()} and {@link #dateHistogramInterval()} are set, then the - * {@link #dateHistogramInterval()} wins. */ + * {@link #dateHistogramInterval()} wins. 
+ * + * @deprecated use {@link #fixedInterval(DateHistogramInterval)} or {@link #calendarInterval(DateHistogramInterval)} instead + * @since 7.2.0 + */ + @Deprecated public DateHistogramAggregationBuilder interval(long interval) { - if (interval < 1) { - throw new IllegalArgumentException("[interval] must be 1 or greater for histogram aggregation [" + name + "]"); - } - this.interval = interval; + dateHistogramInterval.interval(interval); return this; } /** Get the current date interval that is set on this builder. */ + @Deprecated public DateHistogramInterval dateHistogramInterval() { - return dateHistogramInterval; + return dateHistogramInterval.dateHistogramInterval(); } /** Set the interval on this builder, and return the builder so that calls can be chained. * If both {@link #interval()} and {@link #dateHistogramInterval()} are set, then the - * {@link #dateHistogramInterval()} wins. */ - public DateHistogramAggregationBuilder dateHistogramInterval(DateHistogramInterval dateHistogramInterval) { - if (dateHistogramInterval == null) { - throw new IllegalArgumentException("[dateHistogramInterval] must not be null: [" + name + "]"); - } - this.dateHistogramInterval = dateHistogramInterval; + * {@link #dateHistogramInterval()} wins. + * + * @deprecated use {@link #fixedInterval(DateHistogramInterval)} or {@link #calendarInterval(DateHistogramInterval)} instead + * @since 7.2.0 + */ + @Deprecated + public DateHistogramAggregationBuilder dateHistogramInterval(DateHistogramInterval interval) { + dateHistogramInterval.dateHistogramInterval(interval); + return this; + } + + /** + * Sets the interval of the DateHistogram using calendar units (`1d`, `1w`, `1M`, etc). These units + * are calendar-aware, meaning they respect leap additions, variable days per month, etc. + * + * This is mutually exclusive with {@link DateHistogramAggregationBuilder#fixedInterval(DateHistogramInterval)} + * + * @param interval The calendar interval to use with the aggregation + */ + public DateHistogramAggregationBuilder calendarInterval(DateHistogramInterval interval) { + dateHistogramInterval.calendarInterval(interval); + return this; + } + + /** + * Sets the interval of the DateHistogram using fixed units (`1ms`, `1s`, `10m`, `4h`, etc). These are + * not calendar aware and are simply multiples of fixed, SI units. + * + * This is mutually exclusive with {@link DateHistogramAggregationBuilder#calendarInterval(DateHistogramInterval)} + * + * @param interval The fixed interval to use with the aggregation + */ + public DateHistogramAggregationBuilder fixedInterval(DateHistogramInterval interval) { + dateHistogramInterval.fixedInterval(interval); return this; } + /** + * Returns the interval as a date time unit if and only if it was configured as a calendar interval originally. + * Returns null otherwise. + */ + public DateHistogramInterval getCalendarInterval() { + if (dateHistogramInterval.getIntervalType().equals(DateIntervalWrapper.IntervalTypeEnum.CALENDAR)) { + return dateHistogramInterval.getAsCalendarInterval(); + } + return null; + } + + /** + * Returns the interval as a fixed time unit if and only if it was configured as a fixed interval originally. + * Returns null otherwise. + */ + public DateHistogramInterval getFixedInterval() { + if (dateHistogramInterval.getIntervalType().equals(DateIntervalWrapper.IntervalTypeEnum.FIXED)) { + return dateHistogramInterval.getAsFixedInterval(); + } + return null; + } + /** Get the offset to use when rounding, which is a number of milliseconds. 
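 * For example, an offset of {@code 3600000} (one hour in milliseconds) shifts each daily bucket to run
 * from 01:00 to 01:00 instead of midnight to midnight.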
*/ public long offset() { return offset; @@ -338,11 +376,7 @@ public DateHistogramAggregationBuilder minDocCount(long minDocCount) { @Override protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - if (dateHistogramInterval == null) { - builder.field(Histogram.INTERVAL_FIELD.getPreferredName(), interval); - } else { - builder.field(Histogram.INTERVAL_FIELD.getPreferredName(), dateHistogramInterval.toString()); - } + dateHistogramInterval.toXContent(builder, params); builder.field(Histogram.OFFSET_FIELD.getPreferredName(), offset); if (order != null) { @@ -412,13 +446,26 @@ ZoneId rewriteTimeZone(QueryShardContext context) throws IOException { // We need all not only values but also rounded values to be within // [prevTransition, nextTransition]. final long low; - Rounding.DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit(); - if (intervalAsUnit != null) { - Rounding rounding = Rounding.builder(intervalAsUnit).timeZone(timeZone()).build(); + + + DateIntervalWrapper.IntervalTypeEnum intervalType = dateHistogramInterval.getIntervalType(); + if (intervalType.equals(DateIntervalWrapper.IntervalTypeEnum.FIXED)) { + low = Math.addExact(prevTransition, dateHistogramInterval.tryIntervalAsFixedUnit().millis()); + } else if (intervalType.equals(DateIntervalWrapper.IntervalTypeEnum.CALENDAR)) { + final Rounding.DateTimeUnit intervalAsUnit = dateHistogramInterval.tryIntervalAsCalendarUnit(); + final Rounding rounding = Rounding.builder(intervalAsUnit).timeZone(timeZone()).build(); low = rounding.nextRoundingValue(prevTransition); } else { - final TimeValue intervalAsMillis = getIntervalAsTimeValue(); - low = Math.addExact(prevTransition, intervalAsMillis.millis()); + // We're not sure what the interval was originally (legacy) so use old behavior of assuming + // calendar first, then fixed. Required because fixed/cal overlap in places ("1h") + Rounding.DateTimeUnit intervalAsUnit = dateHistogramInterval.tryIntervalAsCalendarUnit(); + if (intervalAsUnit != null) { + final Rounding rounding = Rounding.builder(intervalAsUnit).timeZone(timeZone()).build(); + low = rounding.nextRoundingValue(prevTransition); + } else { + final TimeValue intervalAsMillis = dateHistogramInterval.tryIntervalAsFixedUnit(); + low = Math.addExact(prevTransition, intervalAsMillis.millis()); + } } // rounding rounds down, so 'nextTransition' is a good upper bound final long high = nextTransition; @@ -440,13 +487,13 @@ ZoneId rewriteTimeZone(QueryShardContext context) throws IOException { protected ValuesSourceAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig config, AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { final ZoneId tz = timeZone(); - final Rounding rounding = createRounding(tz); + final Rounding rounding = dateHistogramInterval.createRounding(tz); final ZoneId rewrittenTimeZone = rewriteTimeZone(context.getQueryShardContext()); final Rounding shardRounding; if (tz == rewrittenTimeZone) { shardRounding = rounding; } else { - shardRounding = createRounding(rewrittenTimeZone); + shardRounding = dateHistogramInterval.createRounding(rewrittenTimeZone); } ExtendedBounds roundedBounds = null; @@ -458,47 +505,9 @@ ZoneId rewriteTimeZone(QueryShardContext context) throws IOException { rounding, shardRounding, roundedBounds, context, parent, subFactoriesBuilder, metaData); } - /** Return the interval as a date time unit if applicable. 
If this returns - * {@code null} then it means that the interval is expressed as a fixed - * {@link TimeValue} and may be accessed via - * {@link #getIntervalAsTimeValue()}. */ - private Rounding.DateTimeUnit getIntervalAsDateTimeUnit() { - if (dateHistogramInterval != null) { - return DATE_FIELD_UNITS.get(dateHistogramInterval.toString()); - } - return null; - } - - /** - * Get the interval as a {@link TimeValue}. Should only be called if - * {@link #getIntervalAsDateTimeUnit()} returned {@code null}. - */ - private TimeValue getIntervalAsTimeValue() { - if (dateHistogramInterval != null) { - return TimeValue.parseTimeValue(dateHistogramInterval.toString(), null, getClass().getSimpleName() + ".interval"); - } else { - return TimeValue.timeValueMillis(interval); - } - } - - private Rounding createRounding(ZoneId timeZone) { - Rounding.Builder tzRoundingBuilder; - Rounding.DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit(); - if (intervalAsUnit != null) { - tzRoundingBuilder = Rounding.builder(intervalAsUnit); - } else { - tzRoundingBuilder = Rounding.builder(getIntervalAsTimeValue()); - } - if (timeZone != null) { - tzRoundingBuilder.timeZone(timeZone); - } - Rounding rounding = tzRoundingBuilder.build(); - return rounding; - } - @Override protected int innerHashCode() { - return Objects.hash(order, keyed, minDocCount, interval, dateHistogramInterval, minDocCount, extendedBounds); + return Objects.hash(order, keyed, minDocCount, dateHistogramInterval, offset, extendedBounds); } @Override @@ -507,7 +516,6 @@ protected boolean innerEquals(Object obj) { return Objects.equals(order, other.order) && Objects.equals(keyed, other.keyed) && Objects.equals(minDocCount, other.minDocCount) - && Objects.equals(interval, other.interval) && Objects.equals(dateHistogramInterval, other.dateHistogramInterval) && Objects.equals(offset, other.offset) && Objects.equals(extendedBounds, other.extendedBounds); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramInterval.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramInterval.java index c01a1190ff381..08a4a3bf76faf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramInterval.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramInterval.java @@ -19,9 +19,12 @@ package org.elasticsearch.search.aggregations.bucket.histogram; +import org.elasticsearch.common.Rounding; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -107,4 +110,21 @@ public boolean equals(Object obj) { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return builder.value(toString()); } + + /** + * Converts this DateHistogramInterval into a millisecond representation. If this is a calendar + * interval, it is an approximation of milliseconds based on the fixed equivalent (e.g. `1h` is treated as 60 + * fixed minutes, rather than the hour at a specific point in time). + * + * This is merely a convenience helper for quick comparisons and should not be used for situations that + * require precise durations. 
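+ *
+ * For example, {@code new DateHistogramInterval("1w").estimateMillis()} yields {@code 604800000}
+ * (seven fixed 24-hour days), even though a calendar week that crosses a DST transition is not
+ * exactly that long.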
+ */ + public long estimateMillis() { + if (Strings.isNullOrEmpty(expression) == false && DateHistogramAggregationBuilder.DATE_FIELD_UNITS.containsKey(expression)) { + Rounding.DateTimeUnit intervalUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(expression); + return intervalUnit.getField().getBaseUnit().getDuration().getSeconds() * 1000; + } else { + return TimeValue.parseTimeValue(expression, "DateHistogramInterval#estimateMillis").getMillis(); + } + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalConsumer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalConsumer.java new file mode 100644 index 0000000000000..a53369e2a376c --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalConsumer.java @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket.histogram; + +/** + * A shared interface for aggregations that parse and use "interval" parameters. + * + * Provides definitions for the new fixed and calendar intervals, and deprecated + * definitions for the old interval/dateHistogramInterval parameters. + */ +public interface DateIntervalConsumer<T> { + @Deprecated + T interval(long interval); + @Deprecated + T dateHistogramInterval(DateHistogramInterval dateHistogramInterval); + T calendarInterval(DateHistogramInterval interval); + T fixedInterval(DateHistogramInterval interval); + + @Deprecated + long interval(); + @Deprecated + DateHistogramInterval dateHistogramInterval(); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java new file mode 100644 index 0000000000000..b86989fce168d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java @@ -0,0 +1,423 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.apache.logging.log4j.LogManager; +import org.elasticsearch.Version; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Rounding; +import org.elasticsearch.common.Rounding.DateTimeUnit; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.time.ZoneId; +import java.util.Locale; +import java.util.Objects; + +/** + * A class that handles all the parsing, bwc and deprecations surrounding date histogram intervals. + * + * - Provides parser helpers for the deprecated interval/dateHistogramInterval parameters. + * - Provides parser helpers for the new calendar/fixed interval parameters + * - Can read old intervals from a stream and convert to new intervals + * - Can write new intervals to old format when streaming out + * - Provides a variety of helper methods to interpret the intervals as different types, depending on caller's need + * + * After the deprecated parameters are removed, this class can be simplified greatly. The legacy options + * will be removed, and the mutual-exclusion checks can be done in the setters directly removing the need + * for the enum and the complicated "state machine" logic + */ +public class DateIntervalWrapper implements ToXContentFragment, Writeable { + private static final DeprecationLogger DEPRECATION_LOGGER + = new DeprecationLogger(LogManager.getLogger(DateHistogramAggregationBuilder.class)); + private static final String DEPRECATION_TEXT = "[interval] on [date_histogram] is deprecated, use [fixed_interval] or " + + "[calendar_interval] in the future."; + + private static final ParseField FIXED_INTERVAL_FIELD = new ParseField("fixed_interval"); + private static final ParseField CALENDAR_INTERVAL_FIELD = new ParseField("calendar_interval"); + + public enum IntervalTypeEnum implements Writeable { + NONE, FIXED, CALENDAR, LEGACY_INTERVAL, LEGACY_DATE_HISTO; + + public static IntervalTypeEnum fromString(String name) { + return valueOf(name.trim().toUpperCase(Locale.ROOT)); + } + + public static IntervalTypeEnum fromStream(StreamInput in) throws IOException { + return in.readEnum(IntervalTypeEnum.class); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeEnum(this); + } + + public String value() { + return name().toLowerCase(Locale.ROOT); + } + } + + private DateHistogramInterval dateHistogramInterval; + private IntervalTypeEnum intervalType = IntervalTypeEnum.NONE; + + public static void declareIntervalFields(ObjectParser parser) { + + // NOTE: this field is deprecated and will be removed + parser.declareField((wrapper, interval) -> { + if (interval instanceof Long) { + wrapper.interval((long) interval); + } else { + wrapper.dateHistogramInterval((DateHistogramInterval) interval); + } + }, p -> { + if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return 
p.longValue(); + } else { + return new DateHistogramInterval(p.text()); + } + }, Histogram.INTERVAL_FIELD, ObjectParser.ValueType.LONG); + + parser.declareField(DateIntervalConsumer::calendarInterval, + p -> new DateHistogramInterval(p.text()), CALENDAR_INTERVAL_FIELD, ObjectParser.ValueType.STRING); + + parser.declareField(DateIntervalConsumer::fixedInterval, + p -> new DateHistogramInterval(p.text()), FIXED_INTERVAL_FIELD, ObjectParser.ValueType.STRING); + } + + public DateIntervalWrapper() {} + + public DateIntervalWrapper(StreamInput in) throws IOException { + if (in.getVersion().before(Version.V_8_0_0)) { // TODO change this after backport + long interval = in.readLong(); + DateHistogramInterval histoInterval = in.readOptionalWriteable(DateHistogramInterval::new); + + if (histoInterval != null) { + dateHistogramInterval = histoInterval; + intervalType = IntervalTypeEnum.LEGACY_DATE_HISTO; + } else { + dateHistogramInterval = new DateHistogramInterval(interval + "ms"); + intervalType = IntervalTypeEnum.LEGACY_INTERVAL; + } + } else { + dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new); + intervalType = IntervalTypeEnum.fromStream(in); + } + } + + public IntervalTypeEnum getIntervalType() { + return intervalType; + } + + /** Get the current interval in milliseconds that is set on this builder. */ + @Deprecated + public long interval() { + DEPRECATION_LOGGER.deprecated(DEPRECATION_TEXT); + if (intervalType.equals(IntervalTypeEnum.LEGACY_INTERVAL)) { + return TimeValue.parseTimeValue(dateHistogramInterval.toString(), "interval").getMillis(); + } + return 0; + } + + /** Set the interval on this builder, and return the builder so that calls can be chained. + * If both {@link #interval()} and {@link #dateHistogramInterval()} are set, then the + * {@link #dateHistogramInterval()} wins. + * + * @deprecated use {@link DateHistogramAggregationBuilder#fixedInterval(DateHistogramInterval)} + * or {@link DateHistogramAggregationBuilder#calendarInterval(DateHistogramInterval)} instead + * @since 7.2.0 + */ + @Deprecated + public void interval(long interval) { + if (interval < 1) { + throw new IllegalArgumentException("[interval] must be 1 or greater for aggregation [date_histogram]"); + } + setIntervalType(IntervalTypeEnum.LEGACY_INTERVAL); + DEPRECATION_LOGGER.deprecated(DEPRECATION_TEXT); + this.dateHistogramInterval = new DateHistogramInterval(interval + "ms"); + } + + /** Get the current date interval that is set on this builder. */ + @Deprecated + public DateHistogramInterval dateHistogramInterval() { + DEPRECATION_LOGGER.deprecated(DEPRECATION_TEXT); + if (intervalType.equals(IntervalTypeEnum.LEGACY_DATE_HISTO)) { + return dateHistogramInterval; + } + return null; + } + + /** Set the interval on this builder, and return the builder so that calls can be chained. + * If both {@link #interval()} and {@link #dateHistogramInterval()} are set, then the + * {@link #dateHistogramInterval()} wins. 
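+ * (A value such as {@code 1d} keeps the old ambiguous semantics here: at rounding time it is first
+ * tried as a calendar unit and only falls back to fixed parsing if that fails.)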
+ * + * @deprecated use {@link DateIntervalWrapper#fixedInterval(DateHistogramInterval)} + * or {@link DateIntervalWrapper#calendarInterval(DateHistogramInterval)} instead + * @since 7.2.0 + */ + @Deprecated + public void dateHistogramInterval(DateHistogramInterval dateHistogramInterval) { + if (dateHistogramInterval == null || Strings.isNullOrEmpty(dateHistogramInterval.toString())) { + throw new IllegalArgumentException("[dateHistogramInterval] must not be null: [date_histogram]"); + } + setIntervalType(IntervalTypeEnum.LEGACY_DATE_HISTO); + DEPRECATION_LOGGER.deprecated(DEPRECATION_TEXT); + this.dateHistogramInterval = dateHistogramInterval; + } + + /** + * Returns the interval as a calendar interval. Throws an exception if the value cannot be converted + * into a calendar interval. + */ + public DateHistogramInterval getAsCalendarInterval() { + if (intervalType.equals(IntervalTypeEnum.CALENDAR) || tryIntervalAsCalendarUnit() != null) { + return dateHistogramInterval; + } + throw new IllegalStateException("Cannot convert [" + intervalType.toString() + "] interval type into calendar interval"); + } + + /** + * Sets the interval of the DateHistogram using calendar units (`1d`, `1w`, `1M`, etc). These units + * are calendar-aware, meaning they respect leap additions, variable days per month, etc. + * + * This is mutually exclusive with {@link DateIntervalWrapper#fixedInterval(DateHistogramInterval)} + * + * @param interval The calendar interval to use + */ + public void calendarInterval(DateHistogramInterval interval) { + if (interval == null || Strings.isNullOrEmpty(interval.toString())) { + throw new IllegalArgumentException("[interval] must not be null: [date_histogram]"); + } + if (DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString()) == null) { + throw new IllegalArgumentException("The supplied interval [" + interval + "] could not be parsed " + + "as a calendar interval."); + } + setIntervalType(IntervalTypeEnum.CALENDAR); + this.dateHistogramInterval = interval; + } + + /** + * Returns the interval as a fixed interval. Throws an exception if the value cannot be converted + * into a fixed interval. + */ + public DateHistogramInterval getAsFixedInterval() { + if (intervalType.equals(IntervalTypeEnum.FIXED) || tryIntervalAsFixedUnit() != null) { + return dateHistogramInterval; + } + throw new IllegalStateException("Cannot convert [" + intervalType.toString() + "] interval type into fixed interval"); + } + + /** + * Sets the interval of the DateHistogram using fixed units (`1ms`, `1s`, `10m`, `4h`, etc). These are + * not calendar aware and are simply multiples of fixed, SI units. + * + * This is mutually exclusive with {@link DateIntervalWrapper#calendarInterval(DateHistogramInterval)} + * + * @param interval The fixed interval to use + */ + public void fixedInterval(DateHistogramInterval interval) { + if (interval == null || Strings.isNullOrEmpty(interval.toString())) { + throw new IllegalArgumentException("[interval] must not be null: [date_histogram]"); + } + setIntervalType(IntervalTypeEnum.FIXED); + // Parse to make sure it is a valid fixed interval too + TimeValue.parseTimeValue(interval.toString(), DateHistogramAggregationBuilder.NAME + ".fixedInterval"); + this.dateHistogramInterval = interval; + } + + /** Return the interval as a date time unit if applicable, regardless of how it was configured. If this returns + * {@code null} then it means that the interval is expressed as a fixed + * {@link TimeValue} and may be accessed via {@link #tryIntervalAsFixedUnit()}. 
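+ * For example, a legacy {@code 1d} resolves to the calendar day unit, while {@code 2d} is not a known
+ * calendar unit and returns {@code null}, leaving it to be parsed as a fixed interval.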
*/ + DateTimeUnit tryIntervalAsCalendarUnit() { + if (intervalType.equals(IntervalTypeEnum.CALENDAR) || intervalType.equals(IntervalTypeEnum.LEGACY_DATE_HISTO)) { + return DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(dateHistogramInterval.toString()); + } + return null; + } + + /** + * Get the interval as a {@link TimeValue}, regardless of how it was configured. Returns null if + * the interval cannot be parsed as a fixed time. + */ + TimeValue tryIntervalAsFixedUnit() { + if (dateHistogramInterval == null || Strings.isNullOrEmpty(dateHistogramInterval.toString())) { + return null; + } + try { + return TimeValue.parseTimeValue(dateHistogramInterval.toString(), null, getClass().getSimpleName() + ".interval"); + } catch (IllegalArgumentException e) { + return null; + } + } + + public Rounding createRounding(ZoneId timeZone) { + Rounding.Builder tzRoundingBuilder; + if (isEmpty()) { + throw new IllegalArgumentException("Invalid interval specified, must be non-null and non-empty"); + } + DateIntervalWrapper.IntervalTypeEnum intervalType = getIntervalType(); + if (intervalType.equals(DateIntervalWrapper.IntervalTypeEnum.FIXED)) { + tzRoundingBuilder = Rounding.builder(tryIntervalAsFixedUnit()); + } else if (intervalType.equals(DateIntervalWrapper.IntervalTypeEnum.CALENDAR)) { + tzRoundingBuilder = Rounding.builder(tryIntervalAsCalendarUnit()); + } else { + // We're not sure what the interval was originally (legacy) so use old behavior of assuming + // calendar first, then fixed. Required because fixed/cal overlap in places ("1h") + DateTimeUnit intervalAsUnit = tryIntervalAsCalendarUnit(); + if (intervalAsUnit != null) { + tzRoundingBuilder = Rounding.builder(tryIntervalAsCalendarUnit()); + } else { + tzRoundingBuilder = Rounding.builder(tryIntervalAsFixedUnit()); + } + } + if (timeZone != null) { + tzRoundingBuilder.timeZone(timeZone); + } + return tzRoundingBuilder.build(); + } + + private void setIntervalType(IntervalTypeEnum type) { + // If we're the same or have no existing type, just use the provided type + if (intervalType.equals(IntervalTypeEnum.NONE) || type.equals(intervalType)) { + intervalType = type; + return; + } + + // interval() method + switch (type) { + case LEGACY_INTERVAL: + if (intervalType.equals(IntervalTypeEnum.CALENDAR) || intervalType.equals(IntervalTypeEnum.FIXED)) { + throw new IllegalArgumentException("Cannot use [interval] with [fixed_interval] or [calendar_interval] " + + "configuration options."); + } + + // dateHistogramInterval() takes precedence over interval() + if (intervalType.equals(IntervalTypeEnum.LEGACY_DATE_HISTO) == false) { + intervalType = IntervalTypeEnum.LEGACY_INTERVAL; + } + break; + + case LEGACY_DATE_HISTO: + if (intervalType.equals(IntervalTypeEnum.CALENDAR) || intervalType.equals(IntervalTypeEnum.FIXED)) { + throw new IllegalArgumentException("Cannot use [interval] with [fixed_interval] or [calendar_interval] " + + "configuration options."); + } + + // dateHistogramInterval() takes precedence over interval() + intervalType = IntervalTypeEnum.LEGACY_DATE_HISTO; + break; + + case FIXED: + if (intervalType.equals(IntervalTypeEnum.LEGACY_INTERVAL) || intervalType.equals(IntervalTypeEnum.LEGACY_DATE_HISTO)) { + throw new IllegalArgumentException("Cannot use [fixed_interval] with [interval] " + + "configuration option."); + } + if (intervalType.equals(IntervalTypeEnum.CALENDAR)) { + throw new IllegalArgumentException("Cannot use [fixed_interval] with [calendar_interval] " + + "configuration option."); + } + intervalType = 
IntervalTypeEnum.FIXED; + break; + + case CALENDAR: + if (intervalType.equals(IntervalTypeEnum.LEGACY_INTERVAL) || intervalType.equals(IntervalTypeEnum.LEGACY_DATE_HISTO)) { + throw new IllegalArgumentException("Cannot use [calendar_interval] with [interval] " + + "configuration option."); + } + if (intervalType.equals(IntervalTypeEnum.FIXED)) { + throw new IllegalArgumentException("Cannot use [calendar_interval] with [fixed_interval] " + + "configuration option."); + } + intervalType = IntervalTypeEnum.CALENDAR; + break; + + default: + throw new IllegalStateException("Unknown interval type."); + } + } + + public boolean isEmpty() { + if (intervalType.equals(IntervalTypeEnum.NONE)) { + return true; + } + return dateHistogramInterval == null || Strings.isNullOrEmpty(dateHistogramInterval.toString()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + if (out.getVersion().before(Version.V_8_0_0)) { // TODO change this after backport + if (intervalType.equals(IntervalTypeEnum.LEGACY_INTERVAL)) { + out.writeLong(TimeValue.parseTimeValue(dateHistogramInterval.toString(), + DateHistogramAggregationBuilder.NAME + ".innerWriteTo").getMillis()); + } else { + out.writeLong(0L); + } + out.writeOptionalWriteable(dateHistogramInterval); + } else { + out.writeOptionalWriteable(dateHistogramInterval); + intervalType.writeTo(out); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (intervalType.equals(IntervalTypeEnum.LEGACY_DATE_HISTO) || intervalType.equals(IntervalTypeEnum.LEGACY_INTERVAL)) { + builder.field(Histogram.INTERVAL_FIELD.getPreferredName(), dateHistogramInterval.toString()); + } else if (intervalType.equals(IntervalTypeEnum.FIXED)){ + builder.field(FIXED_INTERVAL_FIELD.getPreferredName(), dateHistogramInterval.toString()); + } else if (intervalType.equals(IntervalTypeEnum.CALENDAR)) { + builder.field(CALENDAR_INTERVAL_FIELD.getPreferredName(), dateHistogramInterval.toString()); + } + return builder; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + if (other == null || getClass() != other.getClass()) { + return false; + } + + final DateIntervalWrapper that = (DateIntervalWrapper) other; + if (tryIntervalAsCalendarUnit() != null && that.tryIntervalAsCalendarUnit() == null) { + return false; + } + if (tryIntervalAsCalendarUnit() == null && that.tryIntervalAsCalendarUnit() != null) { + return false; + } + return Objects.equals(this.dateHistogramInterval, that.dateHistogramInterval); + } + + @Override + public int hashCode() { + boolean isCalendar = tryIntervalAsCalendarUnit() != null; + return Objects.hash(dateHistogramInterval, isCalendar); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java index 7a4e0fb705918..1fd8580e29027 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java @@ -148,7 +148,7 @@ public void testMissingName() throws Exception { .startObject("by_date") .startObject("date_histogram") .field("field", "timestamp") - .field("interval", "month") + .field("calendar_interval", "month") .endObject() .startObject("aggs") // the aggregation name is missing @@ -172,7 +172,7 @@ public void testMissingType() throws Exception { .startObject("by_date") 
.startObject("date_histogram") .field("field", "timestamp") - .field("interval", "month") + .field("calendar_interval", "month") .endObject() .startObject("aggs") .startObject("tag_count") diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java index 40ac3e49f3a65..1b0fbf5bbcd80 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java @@ -158,7 +158,7 @@ public void testHistogram() { public void testDateHistogram() { SearchResponse response = client().prepareSearch("idx") .addAggregation( - dateHistogram("my_histogram").field("date").dateHistogramInterval(DateHistogramInterval.YEAR).missing("2014-05-07")) + dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2014-05-07")) .get(); assertSearchResponse(response); Histogram histogram = response.getAggregations().get("my_histogram"); @@ -170,7 +170,7 @@ public void testDateHistogram() { response = client().prepareSearch("idx") .addAggregation( - dateHistogram("my_histogram").field("date").dateHistogramInterval(DateHistogramInterval.YEAR).missing("2015-05-07")) + dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2015-05-07")) .get(); assertSearchResponse(response); histogram = response.getAggregations().get("my_histogram"); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index eafd88328b799..ad2939347edb1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -1359,7 +1359,7 @@ public void testExceptionOnNegativeInterval() { .addAggregation(dateHistogram("histo").field("date").interval(-TimeUnit.DAYS.toMillis(1)).minDocCount(0)).get(); fail(); } catch (IllegalArgumentException e) { - assertThat(e.toString(), containsString("[interval] must be 1 or greater for histogram aggregation [histo]")); + assertThat(e.toString(), containsString("[interval] must be 1 or greater for aggregation [date_histogram]")); } } @@ -1433,7 +1433,7 @@ public void testDSTEndTransition() throws Exception { SearchResponse response = client().prepareSearch("idx") .setQuery(new MatchNoneQueryBuilder()) .addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("Europe/Oslo")) - .dateHistogramInterval(DateHistogramInterval.HOUR).minDocCount(0).extendedBounds( + .calendarInterval(DateHistogramInterval.HOUR).minDocCount(0).extendedBounds( new ExtendedBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00"))) .get(); @@ -1446,6 +1446,23 @@ public void testDSTEndTransition() throws Exception { ((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli(), equalTo(3600000L)); assertThat(((ZonedDateTime) buckets.get(3).getKey()).toInstant().toEpochMilli() - ((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli(), equalTo(3600000L)); + + response = client().prepareSearch("idx") + .setQuery(new MatchNoneQueryBuilder()) + .addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("Europe/Oslo")) + .dateHistogramInterval(DateHistogramInterval.HOUR).minDocCount(0).extendedBounds( + new 
ExtendedBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00"))) + .get(); + + histo = response.getAggregations().get("histo"); + buckets = histo.getBuckets(); + assertThat(buckets.size(), equalTo(4)); + assertThat(((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli() - + ((ZonedDateTime) buckets.get(0).getKey()).toInstant().toEpochMilli(), equalTo(3600000L)); + assertThat(((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli() - + ((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli(), equalTo(3600000L)); + assertThat(((ZonedDateTime) buckets.get(3).getKey()).toInstant().toEpochMilli() - + ((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli(), equalTo(3600000L)); } /** diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java index d31f7a89b462e..08b8cb13a3377 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java @@ -36,10 +36,11 @@ private DateHistogramValuesSourceBuilder randomDateHistogramSourceBuilder() { histo.script(new Script(randomAlphaOfLengthBetween(10, 20))); } if (randomBoolean()) { - histo.dateHistogramInterval(randomFrom(DateHistogramInterval.days(10), + histo.calendarInterval(randomFrom(DateHistogramInterval.days(1), DateHistogramInterval.minutes(1), DateHistogramInterval.weeks(1))); } else { - histo.interval(randomNonNegativeLong()); + histo.fixedInterval(randomFrom(new DateHistogramInterval(randomNonNegativeLong() + "ms"), + DateHistogramInterval.days(10), DateHistogramInterval.hours(10))); } if (randomBoolean()) { histo.timeZone(randomZone()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index 02c53f3bd4164..706638db1db02 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -1033,6 +1033,8 @@ public void testWithDateHistogram() throws IOException { assertEquals(2L, result.getBuckets().get(1).getDocCount()); } ); + + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } public void testWithDateTerms() throws IOException { @@ -1126,6 +1128,8 @@ public void testWithDateHistogramAndFormat() throws IOException { assertEquals(2L, result.getBuckets().get(1).getDocCount()); } ); + + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } public void testThatDateHistogramFailsFormatAfter() throws IOException { @@ -1157,6 +1161,7 @@ public void testThatDateHistogramFailsFormatAfter() throws IOException { (result) -> {} )); assertThat(exc.getMessage(), containsString("failed to parse date field [1474329600000]")); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } public void testWithDateHistogramAndTimeZone() throws IOException { @@ -1209,6 +1214,8 @@ public void 
testWithDateHistogramAndTimeZone() throws IOException { assertEquals(2L, result.getBuckets().get(1).getDocCount()); } ); + + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } public void testWithDateHistogramAndKeyword() throws IOException { @@ -1286,6 +1293,8 @@ public void testWithDateHistogramAndKeyword() throws IOException { assertEquals(1L, result.getBuckets().get(2).getDocCount()); } ); + + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } public void testWithKeywordAndHistogram() throws IOException { @@ -1482,6 +1491,8 @@ public void testWithKeywordAndDateHistogram() throws IOException { assertEquals(1L, result.getBuckets().get(3).getDocCount()); } ); + + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } public void testWithKeywordAndTopHits() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/LegacyIntervalCompositeAggBuilderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/LegacyIntervalCompositeAggBuilderTests.java new file mode 100644 index 0000000000000..aab225ddf8e7b --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/LegacyIntervalCompositeAggBuilderTests.java @@ -0,0 +1,155 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket.composite; + +import org.elasticsearch.script.Script; +import org.elasticsearch.search.aggregations.BaseAggregationTestCase; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.elasticsearch.search.sort.SortOrder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * Duplicates the tests from {@link CompositeAggregationBuilderTests}, except using the deprecated + * interval on date histo. Separated to make testing the warnings easier. 
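+ * Every test override below must also consume the deprecation warning emitted while the legacy
+ * aggregation is being built.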
+ * + * Can be removed when the legacy interval options are gone */ +public class LegacyIntervalCompositeAggBuilderTests extends BaseAggregationTestCase<CompositeAggregationBuilder> { + + private DateHistogramValuesSourceBuilder randomDateHistogramSourceBuilder() { + DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder(randomAlphaOfLengthBetween(5, 10)); + if (randomBoolean()) { + histo.field(randomAlphaOfLengthBetween(1, 20)); + } else { + histo.script(new Script(randomAlphaOfLengthBetween(10, 20))); + } + if (randomBoolean()) { + histo.dateHistogramInterval(randomFrom(DateHistogramInterval.days(1), + DateHistogramInterval.minutes(1), DateHistogramInterval.weeks(1))); + } else { + histo.interval(randomNonNegativeLong()); + } + if (randomBoolean()) { + histo.timeZone(randomZone()); + } + if (randomBoolean()) { + histo.missingBucket(true); + } + return histo; + } + + private TermsValuesSourceBuilder randomTermsSourceBuilder() { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder(randomAlphaOfLengthBetween(5, 10)); + if (randomBoolean()) { + terms.field(randomAlphaOfLengthBetween(1, 20)); + } else { + terms.script(new Script(randomAlphaOfLengthBetween(10, 20))); + } + terms.order(randomFrom(SortOrder.values())); + if (randomBoolean()) { + terms.missingBucket(true); + } + return terms; + } + + private HistogramValuesSourceBuilder randomHistogramSourceBuilder() { + HistogramValuesSourceBuilder histo = new HistogramValuesSourceBuilder(randomAlphaOfLengthBetween(5, 10)); + if (randomBoolean()) { + histo.field(randomAlphaOfLengthBetween(1, 20)); + } else { + histo.script(new Script(randomAlphaOfLengthBetween(10, 20))); + } + if (randomBoolean()) { + histo.missingBucket(true); + } + histo.interval(randomDoubleBetween(Math.nextUp(0), Double.MAX_VALUE, false)); + return histo; + } + + @Override + protected CompositeAggregationBuilder createTestAggregatorBuilder() { + int numSources = randomIntBetween(1, 10); + List<CompositeValuesSourceBuilder<?>> sources = new ArrayList<>(); + // ensure we add at least one date histo + sources.add(randomDateHistogramSourceBuilder()); + for (int i = 0; i < numSources; i++) { + int type = randomIntBetween(0, 2); + switch (type) { + case 0: + sources.add(randomTermsSourceBuilder()); + break; + case 1: + sources.add(randomDateHistogramSourceBuilder()); + break; + case 2: + sources.add(randomHistogramSourceBuilder()); + break; + default: + throw new AssertionError("wrong branch"); + } + } + return new CompositeAggregationBuilder(randomAlphaOfLength(10), sources); + } + + @Override + public void testFromXContent() throws IOException { + super.testFromXContent(); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); + } + + @Override + public void testFromXContentMulti() throws IOException { + super.testFromXContentMulti(); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); + } + + @Override + public void testSerializationMulti() throws IOException { + super.testSerializationMulti(); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); + } + + @Override + public void testToString() throws IOException { + super.testToString(); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); + } + + @Override + public void testSerialization() throws IOException { + super.testSerialization(); +
assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); + } + + @Override + public void testEqualsAndHashcode() throws IOException { + super.testEqualsAndHashcode(); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); + } + + @Override + public void testShallowCopy() { + super.testShallowCopy(); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java index 3ce74b04e23b8..f671b21eb5e9b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java @@ -34,15 +34,19 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.MultiBucketConsumerService.TooManyBucketsException; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.function.Consumer; +import static org.hamcrest.Matchers.equalTo; + public class DateHistogramAggregatorTests extends AggregatorTestCase { private static final String DATE_FIELD = "date"; @@ -60,7 +64,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase { "2016-03-04T17:09:50", "2017-12-12T22:55:46"); - public void testMatchNoDocs() throws IOException { + public void testMatchNoDocsDeprecatedInterval() throws IOException { testBothCases(new MatchNoDocsQuery(), dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), histogram -> { @@ -68,9 +72,21 @@ public void testMatchNoDocs() throws IOException { assertFalse(AggregationInspectionHelper.hasValue(histogram)); } ); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } - public void testMatchAllDocs() throws IOException { + public void testMatchNoDocs() throws IOException { + testBothCases(new MatchNoDocsQuery(), dataset, + aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), + histogram -> assertEquals(0, histogram.getBuckets().size()) + ); + testBothCases(new MatchNoDocsQuery(), dataset, + aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(DATE_FIELD), + histogram -> assertEquals(0, histogram.getBuckets().size()) + ); + } + + public void testMatchAllDocsDeprecatedInterval() throws IOException { Query query = new MatchAllDocsQuery(); testSearchCase(query, dataset, @@ -94,9 +110,49 @@ public void testMatchAllDocs() throws IOException { assertTrue(AggregationInspectionHelper.hasValue(histogram)); } ); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } - public void testNoDocs() throws IOException { + public void 
testMatchAllDocs() throws IOException { + Query query = new MatchAllDocsQuery(); + + List foo = new ArrayList<>(); + for (int i = 0; i < 1000; i++) { + foo.add(dataset.get(randomIntBetween(0, dataset.size()-1))); + } + testSearchAndReduceCase(query, foo, + aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(DATE_FIELD).order(BucketOrder.count(false)), + histogram -> assertEquals(8, histogram.getBuckets().size()) + ); + + testSearchCase(query, dataset, + aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), + histogram -> assertEquals(6, histogram.getBuckets().size()) + ); + testSearchAndReduceCase(query, dataset, + aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), + histogram -> assertEquals(8, histogram.getBuckets().size()) + ); + testBothCases(query, dataset, + aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(DATE_FIELD).minDocCount(1L), + histogram -> assertEquals(6, histogram.getBuckets().size()) + ); + + testSearchCase(query, dataset, + aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(DATE_FIELD), + histogram -> assertEquals(6, histogram.getBuckets().size()) + ); + testSearchAndReduceCase(query, dataset, + aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(DATE_FIELD), + histogram -> assertEquals(8, histogram.getBuckets().size()) + ); + testBothCases(query, dataset, + aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(DATE_FIELD).minDocCount(1L), + histogram -> assertEquals(6, histogram.getBuckets().size()) + ); + } + + public void testNoDocsDeprecatedInterval() throws IOException { Query query = new MatchNoDocsQuery(); List dates = Collections.emptyList(); Consumer aggregation = agg -> @@ -111,9 +167,32 @@ public void testNoDocs() throws IOException { testSearchAndReduceCase(query, dates, aggregation, histogram -> assertNull(histogram) ); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } - public void testAggregateWrongField() throws IOException { + public void testNoDocs() throws IOException { + Query query = new MatchNoDocsQuery(); + List dates = Collections.emptyList(); + Consumer aggregation = agg -> + agg.calendarInterval(DateHistogramInterval.YEAR).field(DATE_FIELD); + testSearchCase(query, dates, aggregation, + histogram -> assertEquals(0, histogram.getBuckets().size()) + ); + testSearchAndReduceCase(query, dates, aggregation, + histogram -> assertNull(histogram) + ); + + aggregation = agg -> + agg.fixedInterval(new DateHistogramInterval("365d")).field(DATE_FIELD); + testSearchCase(query, dates, aggregation, + histogram -> assertEquals(0, histogram.getBuckets().size()) + ); + testSearchAndReduceCase(query, dates, aggregation, + histogram -> assertNull(histogram) + ); + } + + public void testAggregateWrongFieldDeprecated() throws IOException { testBothCases(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field("wrong_field"), histogram -> { @@ -121,9 +200,21 @@ public void testAggregateWrongField() throws IOException { assertFalse(AggregationInspectionHelper.hasValue(histogram)); } ); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } - public void testIntervalYear() throws IOException { + public void testAggregateWrongField() throws 
IOException { + testBothCases(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field("wrong_field"), + histogram -> assertEquals(0, histogram.getBuckets().size()) + ); + testBothCases(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field("wrong_field"), + histogram -> assertEquals(0, histogram.getBuckets().size()) + ); + } + + public void testIntervalYearDeprecated() throws IOException { testBothCases(LongPoint.newRangeQuery(INSTANT_FIELD, asLong("2015-01-01"), asLong("2017-12-31")), dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), histogram -> { @@ -143,9 +234,32 @@ public void testIntervalYear() throws IOException { assertEquals(1, bucket.getDocCount()); } ); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } - public void testIntervalMonth() throws IOException { + public void testIntervalYear() throws IOException { + testBothCases(LongPoint.newRangeQuery(INSTANT_FIELD, asLong("2015-01-01"), asLong("2017-12-31")), dataset, + aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2015-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2016-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + } + + public void testIntervalMonthDeprecated() throws IOException { testBothCases(new MatchAllDocsQuery(), Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"), aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.MONTH).field(DATE_FIELD), @@ -166,9 +280,33 @@ public void testIntervalMonth() throws IOException { assertEquals(3, bucket.getDocCount()); } ); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } - public void testIntervalDay() throws IOException { + public void testIntervalMonth() throws IOException { + testBothCases(new MatchAllDocsQuery(), + Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.MONTH).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-03-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + ); + } + + public void testIntervalDayDeprecated() throws IOException { testBothCases(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01", @@ -201,9 +339,77 @@ public void testIntervalDay() throws IOException { assertEquals(1, bucket.getDocCount()); } ); + assertWarnings("[interval] on 
[date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } - public void testIntervalHour() throws IOException { + public void testIntervalDay() throws IOException { + testBothCases(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY).field(DATE_FIELD).minDocCount(1L), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(4, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-02T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-03T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-05T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + testBothCases(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.fixedInterval(new DateHistogramInterval("24h")).field(DATE_FIELD).minDocCount(1L), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(4, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-02T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-03T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-05T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + } + + public void testIntervalHourDeprecated() throws IOException { testBothCases(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T09:02:00.000Z", @@ -247,9 +453,99 @@ public void testIntervalHour() throws IOException { assertEquals(3, bucket.getDocCount()); } ); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } - public void testIntervalMinute() throws IOException { + public void testIntervalHour() throws IOException { + testBothCases(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:00.000Z", + "2017-02-01T09:35:00.000Z", + "2017-02-01T10:15:00.000Z", + "2017-02-01T13:06:00.000Z", + "2017-02-01T14:04:00.000Z", + "2017-02-01T14:05:00.000Z", + "2017-02-01T15:59:00.000Z", + "2017-02-01T16:06:00.000Z", + "2017-02-01T16:48:00.000Z", + "2017-02-01T16:59:00.000Z" + ), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.HOUR).field(DATE_FIELD).minDocCount(1L), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(6, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T10:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + 
assertEquals("2017-02-01T13:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T14:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T15:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T16:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + ); + testBothCases(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:00.000Z", + "2017-02-01T09:35:00.000Z", + "2017-02-01T10:15:00.000Z", + "2017-02-01T13:06:00.000Z", + "2017-02-01T14:04:00.000Z", + "2017-02-01T14:05:00.000Z", + "2017-02-01T15:59:00.000Z", + "2017-02-01T16:06:00.000Z", + "2017-02-01T16:48:00.000Z", + "2017-02-01T16:59:00.000Z" + ), + aggregation -> aggregation.fixedInterval(new DateHistogramInterval("60m")).field(DATE_FIELD).minDocCount(1L), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(6, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T10:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T13:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T14:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T15:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T16:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + ); + } + + public void testIntervalMinuteDeprecated() throws IOException { testBothCases(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T09:02:35.000Z", @@ -276,9 +572,65 @@ public void testIntervalMinute() throws IOException { assertEquals(2, bucket.getDocCount()); } ); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } - public void testIntervalSecond() throws IOException { + public void testIntervalMinute() throws IOException { + testBothCases(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:35.000Z", + "2017-02-01T09:02:59.000Z", + "2017-02-01T09:15:37.000Z", + "2017-02-01T09:16:04.000Z", + "2017-02-01T09:16:42.000Z" + ), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.MINUTE).field(DATE_FIELD).minDocCount(1L), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:02:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T09:15:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T09:16:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + } + ); + testBothCases(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:35.000Z", + "2017-02-01T09:02:59.000Z", + "2017-02-01T09:15:37.000Z", + "2017-02-01T09:16:04.000Z", + "2017-02-01T09:16:42.000Z" + ), + 
aggregation -> aggregation.fixedInterval(new DateHistogramInterval("60s")).field(DATE_FIELD).minDocCount(1L), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:02:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T09:15:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T09:16:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + } + ); + } + + public void testIntervalSecondDeprecated() throws IOException { testBothCases(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T00:00:05.015Z", @@ -306,9 +658,67 @@ public void testIntervalSecond() throws IOException { assertEquals(3, bucket.getDocCount()); } ); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } - public void testMinDocCount() throws IOException { + public void testIntervalSecond() throws IOException { + testBothCases(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T00:00:05.015Z", + "2017-02-01T00:00:11.299Z", + "2017-02-01T00:00:11.074Z", + "2017-02-01T00:00:37.688Z", + "2017-02-01T00:00:37.210Z", + "2017-02-01T00:00:37.380Z" + ), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.SECOND).field(DATE_FIELD).minDocCount(1L), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:05.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T00:00:11.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T00:00:37.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + ); + testBothCases(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T00:00:05.015Z", + "2017-02-01T00:00:11.299Z", + "2017-02-01T00:00:11.074Z", + "2017-02-01T00:00:37.688Z", + "2017-02-01T00:00:37.210Z", + "2017-02-01T00:00:37.380Z" + ), + aggregation -> aggregation.fixedInterval(new DateHistogramInterval("1000ms")).field(DATE_FIELD).minDocCount(1L), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:05.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T00:00:11.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T00:00:37.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + ); + } + + public void testMinDocCountDeprecated() throws IOException { Query query = LongPoint.newRangeQuery(INSTANT_FIELD, asLong("2017-02-01T00:00:00.000Z"), asLong("2017-02-01T00:00:30.000Z")); List timestamps = Arrays.asList( "2017-02-01T00:00:05.015Z", @@ -355,6 +765,56 @@ public void testMinDocCount() throws IOException { assertEquals(3, bucket.getDocCount()); } ); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); + } + + public void testMinDocCount() throws IOException { + Query query = LongPoint.newRangeQuery(INSTANT_FIELD, 
asLong("2017-02-01T00:00:00.000Z"), asLong("2017-02-01T00:00:30.000Z")); + List timestamps = Arrays.asList( + "2017-02-01T00:00:05.015Z", + "2017-02-01T00:00:11.299Z", + "2017-02-01T00:00:11.074Z", + "2017-02-01T00:00:13.688Z", + "2017-02-01T00:00:21.380Z" + ); + + // 5 sec interval with minDocCount = 0 + testSearchAndReduceCase(query, timestamps, + aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD).minDocCount(0L), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(4, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:05.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T00:00:10.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T00:00:15.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T00:00:20.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + + // 5 sec interval with minDocCount = 3 + testSearchAndReduceCase(query, timestamps, + aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD).minDocCount(3L), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(1, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:10.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + ); } public void testMaxBucket() throws IOException { @@ -365,6 +825,38 @@ public void testMaxBucket() throws IOException { "2017-01-01T00:00:00.000Z" ); + expectThrows(TooManyBucketsException.class, () -> testSearchCase(query, timestamps, + aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD), + histogram -> {}, 2)); + + expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, + aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD), + histogram -> {}, 2)); + + expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, + aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD).minDocCount(0L), + histogram -> {}, 100)); + + expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, + aggregation -> + aggregation.fixedInterval(DateHistogramInterval.seconds(5)) + .field(DATE_FIELD) + .subAggregation( + AggregationBuilders.dateHistogram("1") + .fixedInterval(DateHistogramInterval.seconds(5)) + .field(DATE_FIELD) + ), + histogram -> {}, 5)); + } + + public void testMaxBucketDeprecated() throws IOException { + Query query = new MatchAllDocsQuery(); + List timestamps = Arrays.asList( + "2010-01-01T00:00:00.000Z", + "2011-01-01T00:00:00.000Z", + "2017-01-01T00:00:00.000Z" + ); + expectThrows(TooManyBucketsException.class, () -> testSearchCase(query, timestamps, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD), histogram -> {}, 2)); @@ -387,6 +879,222 @@ public void testMaxBucket() throws IOException { .field(DATE_FIELD) ), histogram -> {}, 5)); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); + } + + public void testFixedWithCalendar() throws IOException { + IllegalArgumentException e = 
expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.fixedInterval(DateHistogramInterval.WEEK).field(DATE_FIELD), + histogram -> {} + )); + assertThat(e.getMessage(), equalTo("failed to parse setting [date_histogram.fixedInterval] with value [1w] as a time value: " + + "unit is missing or unrecognized")); + } + + public void testCalendarWithFixed() throws IOException { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.calendarInterval(new DateHistogramInterval("5d")).field(DATE_FIELD), + histogram -> {} + )); + assertThat(e.getMessage(), equalTo("The supplied interval [5d] could not be parsed as a calendar interval.")); + } + + public void testCalendarAndThenFixed() throws IOException { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY) + .fixedInterval(new DateHistogramInterval("2d")) + .field(DATE_FIELD), + histogram -> {} + )); + assertThat(e.getMessage(), equalTo("Cannot use [fixed_interval] with [calendar_interval] configuration option.")); + } + + public void testFixedAndThenCalendar() throws IOException { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.fixedInterval(new DateHistogramInterval("2d")) + .calendarInterval(DateHistogramInterval.DAY) + .field(DATE_FIELD), + histogram -> {} + )); + assertThat(e.getMessage(), equalTo("Cannot use [calendar_interval] with [fixed_interval] configuration option.")); + } + + public void testNewThenLegacy() throws IOException { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.fixedInterval(new DateHistogramInterval("2d")) + .dateHistogramInterval(DateHistogramInterval.DAY) + .field(DATE_FIELD), + histogram -> {} + )); + assertThat(e.getMessage(), equalTo("Cannot use [interval] with [fixed_interval] or [calendar_interval] configuration options.")); + + e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY) + .dateHistogramInterval(DateHistogramInterval.DAY) + .field(DATE_FIELD), + histogram -> {} + )); + assertThat(e.getMessage(), equalTo("Cannot use [interval] with [fixed_interval] or [calendar_interval] configuration options.")); + + e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + 
"2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.fixedInterval(new DateHistogramInterval("2d")) + .interval(1000) + .field(DATE_FIELD), + histogram -> {} + )); + assertThat(e.getMessage(), equalTo("Cannot use [interval] with [fixed_interval] or [calendar_interval] configuration options.")); + + e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY) + .interval(1000) + .field(DATE_FIELD), + histogram -> {} + )); + assertThat(e.getMessage(), equalTo("Cannot use [interval] with [fixed_interval] or [calendar_interval] configuration options.")); + } + + public void testLegacyThenNew() throws IOException { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation .dateHistogramInterval(DateHistogramInterval.DAY) + .fixedInterval(new DateHistogramInterval("2d")) + .field(DATE_FIELD), + histogram -> {} + )); + assertThat(e.getMessage(), equalTo("Cannot use [fixed_interval] with [interval] configuration option.")); + + e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.DAY) + .calendarInterval(DateHistogramInterval.DAY) + .field(DATE_FIELD), + histogram -> {} + )); + assertThat(e.getMessage(), equalTo("Cannot use [calendar_interval] with [interval] configuration option.")); + + e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.interval(1000) + .fixedInterval(new DateHistogramInterval("2d")) + .field(DATE_FIELD), + histogram -> {} + )); + assertThat(e.getMessage(), equalTo("Cannot use [fixed_interval] with [interval] configuration option.")); + + e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.interval(1000) + .calendarInterval(DateHistogramInterval.DAY) + .field(DATE_FIELD), + histogram -> {} + )); + assertThat(e.getMessage(), equalTo("Cannot use [calendar_interval] with [interval] configuration option.")); + + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } private void testSearchCase(Query query, List dataset, diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java index 1a639552ae4be..38ed1776ec2c5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java @@ -45,29 +45,26 @@ protected DateHistogramAggregationBuilder createTestAggregatorBuilder() { DateHistogramAggregationBuilder factory = new DateHistogramAggregationBuilder(randomAlphaOfLengthBetween(3, 10)); factory.field(INT_FIELD_NAME); if (randomBoolean()) { - factory.interval(randomIntBetween(1, 100000)); + factory.fixedInterval(new DateHistogramInterval(randomIntBetween(1, 100000) + "ms")); } else { if (randomBoolean()) { - factory.dateHistogramInterval(randomFrom(DateHistogramInterval.YEAR, DateHistogramInterval.QUARTER, + factory.calendarInterval(randomFrom(DateHistogramInterval.YEAR, DateHistogramInterval.QUARTER, DateHistogramInterval.MONTH, DateHistogramInterval.WEEK, DateHistogramInterval.DAY, DateHistogramInterval.HOUR, DateHistogramInterval.MINUTE, DateHistogramInterval.SECOND)); } else { - int branch = randomInt(4); + int branch = randomInt(3); switch (branch) { case 0: - factory.dateHistogramInterval(DateHistogramInterval.seconds(randomIntBetween(1, 1000))); + factory.fixedInterval(DateHistogramInterval.seconds(randomIntBetween(1, 1000))); break; case 1: - factory.dateHistogramInterval(DateHistogramInterval.minutes(randomIntBetween(1, 1000))); + factory.fixedInterval(DateHistogramInterval.minutes(randomIntBetween(1, 1000))); break; case 2: - factory.dateHistogramInterval(DateHistogramInterval.hours(randomIntBetween(1, 1000))); + factory.fixedInterval(DateHistogramInterval.hours(randomIntBetween(1, 1000))); break; case 3: - factory.dateHistogramInterval(DateHistogramInterval.days(randomIntBetween(1, 1000))); - break; - case 4: - factory.dateHistogramInterval(DateHistogramInterval.weeks(randomIntBetween(1, 1000))); + factory.fixedInterval(DateHistogramInterval.days(randomIntBetween(1, 1000))); break; default: throw new IllegalStateException("invalid branch: " + branch); @@ -160,7 +157,7 @@ public void testRewriteTimeZone() throws IOException { DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("my_date_histo"); builder.field(DATE_FIELD_NAME); - builder.dateHistogramInterval(DateHistogramInterval.DAY); + builder.calendarInterval(DateHistogramInterval.DAY); // no timeZone => no rewrite assertNull(builder.rewriteTimeZone(shardContextThatDoesntCross)); @@ -179,7 +176,7 @@ public void testRewriteTimeZone() throws IOException { assertSame(tz, builder.rewriteTimeZone(shardContextThatCrosses)); // Rounded values are no longer all within the same transitions => no rewrite - builder.dateHistogramInterval(DateHistogramInterval.MONTH); + builder.calendarInterval(DateHistogramInterval.MONTH); assertSame(tz, builder.rewriteTimeZone(shardContextThatDoesntCross)); assertSame(tz, builder.rewriteTimeZone(shardContextThatCrosses)); @@ -187,13 +184,13 @@ public void testRewriteTimeZone() throws IOException { builder.field(DATE_FIELD_NAME); builder.timeZone(tz); - builder.interval(1000L * 60 * 60 * 24); // ~ 1 day + builder.fixedInterval(new DateHistogramInterval(1000L * 60 * 60 * 24 + "ms")); // ~ 1 day assertEquals(ZoneOffset.ofHours(1), builder.rewriteTimeZone(shardContextThatDoesntCross)); assertSame(tz, builder.rewriteTimeZone(shardContextThatCrosses)); // Because the interval is large, rounded values are not // within the same transitions as the values => no rewrite - builder.interval(1000L * 60 * 60 * 24 * 30); // ~ 1 month + builder.fixedInterval(new DateHistogramInterval(1000L * 60 * 60 * 24 * 30 + "ms")); // ~ 1 month assertSame(tz, 
builder.rewriteTimeZone(shardContextThatDoesntCross)); assertSame(tz, builder.rewriteTimeZone(shardContextThatCrosses)); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapperTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapperTests.java new file mode 100644 index 0000000000000..36cab5b603a6c --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapperTests.java @@ -0,0 +1,127 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class DateIntervalWrapperTests extends ESTestCase { + public void testValidOrdinals() { + assertThat(DateIntervalWrapper.IntervalTypeEnum.NONE.ordinal(), equalTo(0)); + assertThat(DateIntervalWrapper.IntervalTypeEnum.FIXED.ordinal(), equalTo(1)); + assertThat(DateIntervalWrapper.IntervalTypeEnum.CALENDAR.ordinal(), equalTo(2)); + assertThat(DateIntervalWrapper.IntervalTypeEnum.LEGACY_INTERVAL.ordinal(), equalTo(3)); + assertThat(DateIntervalWrapper.IntervalTypeEnum.LEGACY_DATE_HISTO.ordinal(), equalTo(4)); + } + + public void testwriteTo() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + DateIntervalWrapper.IntervalTypeEnum.NONE.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + assertThat(in.readVInt(), equalTo(0)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + DateIntervalWrapper.IntervalTypeEnum.FIXED.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + assertThat(in.readVInt(), equalTo(1)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + DateIntervalWrapper.IntervalTypeEnum.CALENDAR.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + assertThat(in.readVInt(), equalTo(2)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + DateIntervalWrapper.IntervalTypeEnum.LEGACY_INTERVAL.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + assertThat(in.readVInt(), equalTo(3)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + DateIntervalWrapper.IntervalTypeEnum.LEGACY_DATE_HISTO.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + assertThat(in.readVInt(), equalTo(4)); + } + } + + } + + public void testReadFrom() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(0); + try 
(StreamInput in = out.bytes().streamInput()) { + assertThat(DateIntervalWrapper.IntervalTypeEnum.fromStream(in), + equalTo(DateIntervalWrapper.IntervalTypeEnum.NONE)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(1); + try (StreamInput in = out.bytes().streamInput()) { + assertThat(DateIntervalWrapper.IntervalTypeEnum.fromStream(in), + equalTo(DateIntervalWrapper.IntervalTypeEnum.FIXED)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(2); + try (StreamInput in = out.bytes().streamInput()) { + assertThat(DateIntervalWrapper.IntervalTypeEnum.fromStream(in), + equalTo(DateIntervalWrapper.IntervalTypeEnum.CALENDAR)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(3); + try (StreamInput in = out.bytes().streamInput()) { + assertThat(DateIntervalWrapper.IntervalTypeEnum.fromStream(in), + equalTo(DateIntervalWrapper.IntervalTypeEnum.LEGACY_INTERVAL)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(4); + try (StreamInput in = out.bytes().streamInput()) { + assertThat(DateIntervalWrapper.IntervalTypeEnum.fromStream(in), + equalTo(DateIntervalWrapper.IntervalTypeEnum.LEGACY_DATE_HISTO)); + } + } + } + + public void testInvalidReadFrom() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(randomIntBetween(5, Integer.MAX_VALUE)); + try (StreamInput in = out.bytes().streamInput()) { + DateIntervalWrapper.IntervalTypeEnum.fromStream(in); + fail("Expected IOException"); + } catch(IOException e) { + assertThat(e.getMessage(), containsString("Unknown IntervalTypeEnum ordinal [")); + } + + } + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java index 627ca9c0af9bb..4f312a71a8352 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java @@ -84,7 +84,7 @@ public void testSameAggNames() throws IOException { AvgAggregationBuilder avgBuilder = new AvgAggregationBuilder("foo").field(VALUE_FIELD); DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("histo") - .dateHistogramInterval(DateHistogramInterval.YEAR) + .calendarInterval(DateHistogramInterval.YEAR) .field(DATE_FIELD) .subAggregation(new AvgAggregationBuilder("foo").field(VALUE_FIELD)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java index e3475be5773e8..9d27663d275f7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java @@ -84,7 +84,7 @@ public void testSimple() throws IOException { Query query = new MatchAllDocsQuery(); DateHistogramAggregationBuilder aggBuilder = new DateHistogramAggregationBuilder("histo"); - aggBuilder.dateHistogramInterval(DateHistogramInterval.DAY).field(HISTO_FIELD); + aggBuilder.calendarInterval(DateHistogramInterval.DAY).field(HISTO_FIELD); aggBuilder.subAggregation(new AvgAggregationBuilder("the_avg").field(VALUE_FIELD)); aggBuilder.subAggregation(new 
CumulativeSumPipelineAggregationBuilder("cusum", "the_avg")); @@ -107,7 +107,7 @@ public void testDerivative() throws IOException { Query query = new MatchAllDocsQuery(); DateHistogramAggregationBuilder aggBuilder = new DateHistogramAggregationBuilder("histo"); - aggBuilder.dateHistogramInterval(DateHistogramInterval.DAY).field(HISTO_FIELD); + aggBuilder.calendarInterval(DateHistogramInterval.DAY).field(HISTO_FIELD); aggBuilder.subAggregation(new AvgAggregationBuilder("the_avg").field(VALUE_FIELD)); aggBuilder.subAggregation(new DerivativePipelineAggregationBuilder("the_deriv", "the_avg")); aggBuilder.subAggregation(new CumulativeSumPipelineAggregationBuilder("cusum", "the_deriv")); @@ -148,6 +148,7 @@ public void testCount() throws IOException { sum += 1.0; } }); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } public void testDocCount() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnUnitTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnUnitTests.java index 1368db5ab71e6..27490fa202bda 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnUnitTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnUnitTests.java @@ -83,7 +83,7 @@ public void testMatchAllDocs() throws IOException { Script script = new Script(Script.DEFAULT_SCRIPT_TYPE, "painless", "test", Collections.emptyMap()); DateHistogramAggregationBuilder aggBuilder = new DateHistogramAggregationBuilder("histo"); - aggBuilder.dateHistogramInterval(DateHistogramInterval.DAY).field(DATE_FIELD); + aggBuilder.calendarInterval(DateHistogramInterval.DAY).field(DATE_FIELD); aggBuilder.subAggregation(new AvgAggregationBuilder("avg").field(VALUE_FIELD)); aggBuilder.subAggregation(new MovFnPipelineAggregationBuilder("mov_fn", "avg", script, 3)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java index bb1faeddd8298..27938352ef4b6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java @@ -132,10 +132,17 @@ private static long validateAndGetDateHistogramInterval(DateHistogramAggregation throw ExceptionsHelper.badRequestException("ML requires date_histogram.time_zone to be UTC"); } - if (dateHistogram.dateHistogramInterval() != null) { + // TODO retains `dateHistogramInterval()`/`interval()` access for bwc logic, needs updating + if (dateHistogram.getCalendarInterval() != null) { + return validateAndGetCalendarInterval(dateHistogram.getCalendarInterval().toString()); + } else if (dateHistogram.getFixedInterval() != null) { + return dateHistogram.getFixedInterval().estimateMillis(); + } else if (dateHistogram.dateHistogramInterval() != null) { return validateAndGetCalendarInterval(dateHistogram.dateHistogramInterval().toString()); - } else { + } else if (dateHistogram.interval() != 0) { return dateHistogram.interval(); + } else { + throw new IllegalArgumentException("Must specify an interval for DateHistogram"); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupJobCaps.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupJobCaps.java index 93cf0cbeeb30c..0fe47d96ffe92 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupJobCaps.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupJobCaps.java @@ -146,7 +146,8 @@ private static Map createRollupFieldCaps(final RollupJo final DateHistogramGroupConfig dateHistogram = groupConfig.getDateHistogram(); final Map dateHistogramAggCap = new HashMap<>(); dateHistogramAggCap.put("agg", DateHistogramAggregationBuilder.NAME); - dateHistogramAggCap.put(DateHistogramGroupConfig.INTERVAL, dateHistogram.getInterval().toString()); + dateHistogramAggCap.put(dateHistogram.getIntervalTypeName(), dateHistogram.getInterval().toString()); + if (dateHistogram.getDelay() != null) { dateHistogramAggCap.put(DateHistogramGroupConfig.DELAY, dateHistogram.getDelay().toString()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java index c9fe0c644a86b..4db5966671df4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java @@ -50,17 +50,45 @@ public class DateHistogramGroupConfig implements Writeable, ToXContentObject { static final String NAME = "date_histogram"; public static final String INTERVAL = "interval"; - private static final String FIELD = "field"; + public static final String FIXED_INTERVAL = "fixed_interval"; + public static final String CALENDAR_INTERVAL = "calendar_interval"; public static final String TIME_ZONE = "time_zone"; public static final String DELAY = "delay"; - public static final String DEFAULT_TIMEZONE = "UTC"; + + private static final String DEFAULT_TIMEZONE = "UTC"; public static final ZoneId DEFAULT_ZONEID_TIMEZONE = ZoneOffset.UTC; + private static final String FIELD = "field"; + private static final String TYPE_NAME = "interval"; + private static final ConstructingObjectParser PARSER; static { - PARSER = new ConstructingObjectParser<>(NAME, a -> - new DateHistogramGroupConfig((String) a[0], (DateHistogramInterval) a[1], (DateHistogramInterval) a[2], (String) a[3])); + PARSER = new ConstructingObjectParser<>(NAME, a -> { + DateHistogramInterval oldInterval = (DateHistogramInterval) a[1]; + DateHistogramInterval calendarInterval = (DateHistogramInterval) a[2]; + DateHistogramInterval fixedInterval = (DateHistogramInterval) a[3]; + + if (oldInterval != null) { + if (calendarInterval != null || fixedInterval != null) { + throw new IllegalArgumentException("Cannot use [interval] with [fixed_interval] or [calendar_interval] " + + "configuration options."); + } + return fromUnknownTimeUnit((String) a[0], oldInterval, (DateHistogramInterval) a[4], (String) a[5]); + } else if (calendarInterval != null && fixedInterval == null) { + return new CalendarInterval((String) a[0], calendarInterval, (DateHistogramInterval) a[4], (String) a[5]); + } else if (calendarInterval == null && fixedInterval != null) { + return new FixedInterval((String) a[0], fixedInterval, (DateHistogramInterval) a[4], (String) a[5]); + } else if (calendarInterval != null && fixedInterval != null) { + throw new IllegalArgumentException("Cannot set both [fixed_interval] and [calendar_interval] at the same 
time"); + } else { + throw new IllegalArgumentException("An interval is required. Use [fixed_interval] or [calendar_interval]."); + } + }); PARSER.declareString(constructorArg(), new ParseField(FIELD)); - PARSER.declareField(constructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(INTERVAL), ValueType.STRING); + PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(INTERVAL), ValueType.STRING); + PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), + new ParseField(CALENDAR_INTERVAL), ValueType.STRING); + PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), + new ParseField(FIXED_INTERVAL), ValueType.STRING); PARSER.declareField(optionalConstructorArg(), p -> new DateHistogramInterval(p.text()), new ParseField(DELAY), ValueType.STRING); PARSER.declareString(optionalConstructorArg(), new ParseField(TIME_ZONE)); } @@ -70,9 +98,98 @@ public class DateHistogramGroupConfig implements Writeable, ToXContentObject { private final DateHistogramInterval delay; private final String timeZone; + /** + * FixedInterval is a {@link DateHistogramGroupConfig} that uses a fixed time interval for rolling up data. + * The fixed time interval is one or multiples of SI units and has no calendar-awareness (e.g. doesn't account + * for leap corrections, does not have variable length months, etc). + * + * For calendar-aware rollups, use {@link CalendarInterval} + */ + public static class FixedInterval extends DateHistogramGroupConfig { + private static final String TYPE_NAME = "fixed_interval"; + public FixedInterval(String field, DateHistogramInterval interval) { + this(field, interval, null, null); + } + + public FixedInterval(String field, DateHistogramInterval interval, DateHistogramInterval delay, String timeZone) { + super(field, interval, delay, timeZone); + // validate fixed time + TimeValue.parseTimeValue(interval.toString(), NAME + ".FixedInterval"); + } + + FixedInterval(StreamInput in) throws IOException { + super(in); + } + + @Override + public String getIntervalTypeName() { + return TYPE_NAME; + } + } + + /** + * CalendarInterval is a {@link DateHistogramGroupConfig} that uses calendar-aware intervals for rolling up data. + * Calendar time intervals understand leap corrections and contextual differences in certain calendar units (e.g. + * months are variable length depending on the month). 
Calendar units are only available in singular quantities: + * 1s, 1m, 1h, 1d, 1w, 1q, 1M, 1y + * + * For fixed time rollups, use {@link FixedInterval} + */ + public static class CalendarInterval extends DateHistogramGroupConfig { + private static final String TYPE_NAME = "calendar_interval"; + public CalendarInterval(String field, DateHistogramInterval interval) { + this(field, interval, null, null); + } + + public CalendarInterval(String field, DateHistogramInterval interval, DateHistogramInterval delay, String timeZone) { + super(field, interval, delay, timeZone); + if (DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString()) == null) { + throw new IllegalArgumentException("The supplied interval [" + interval +"] could not be parsed " + + "as a calendar interval."); + } + } + + CalendarInterval(StreamInput in) throws IOException { + super(in); + } + + @Override + public String getIntervalTypeName() { + return TYPE_NAME; + } + } + + /** + * This helper can be used to "up-convert" a legacy job date histo config stored with plain "interval" into + * one of the new Fixed or Calendar intervals. It follows the old behavior where the interval is first + * parsed with the calendar logic, and if that fails, it is assumed to be a fixed interval + */ + private static DateHistogramGroupConfig fromUnknownTimeUnit(String field, DateHistogramInterval interval, + DateHistogramInterval delay, String timeZone) { + if (DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString()) != null) { + return new CalendarInterval(field, interval, delay, timeZone); + } else { + return new FixedInterval(field, interval, delay, timeZone); + } + } + + static DateHistogramGroupConfig fromUnknownTimeUnit(StreamInput in) throws IOException { + DateHistogramInterval interval = new DateHistogramInterval(in); + String field = in.readString(); + DateHistogramInterval delay = in.readOptionalWriteable(DateHistogramInterval::new); + String timeZone = in.readString(); + return fromUnknownTimeUnit(field, interval, delay, timeZone); + } + /** * Create a new {@link DateHistogramGroupConfig} using the given field and interval parameters. + * + * @deprecated Build a DateHistoConfig using {@link DateHistogramGroupConfig.CalendarInterval} + * or {@link DateHistogramGroupConfig.FixedInterval} instead + * + * @since 7.2.0 */ + @Deprecated public DateHistogramGroupConfig(final String field, final DateHistogramInterval interval) { this(field, interval, null, null); } @@ -89,7 +206,13 @@ public DateHistogramGroupConfig(final String field, final DateHistogramInterval * @param interval the interval to use for the date histogram (required) * @param delay the time delay (optional) * @param timeZone the id of time zone to use to calculate the date histogram (optional). When {@code null}, the UTC timezone is used. 
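+ * <p>
+ * As a sketch, the replacement subclasses added in this patch are constructed directly
+ * (the "ts" field name is illustrative only):
+ * <pre>
+ * // calendar-aware daily buckets (variable-length units, leap corrections)
+ * new DateHistogramGroupConfig.CalendarInterval("ts", DateHistogramInterval.DAY);
+ * // fixed buckets of exactly 24 hours
+ * new DateHistogramGroupConfig.FixedInterval("ts", new DateHistogramInterval("24h"));
+ * </pre>
+ * A legacy config with a plain "interval" is up-converted by {@code fromUnknownTimeUnit}, which tries
+ * the calendar parse first: "1d" becomes a CalendarInterval, while "2d" (not a valid calendar unit)
+ * falls back to a FixedInterval.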
+ * + * @deprecated Build a DateHistogramGroupConfig using {@link DateHistogramGroupConfig.CalendarInterval} + * or {@link DateHistogramGroupConfig.FixedInterval} instead + * + * @since 7.2.0 */ + @Deprecated public DateHistogramGroupConfig(final String field, final DateHistogramInterval interval, final @Nullable DateHistogramInterval delay, @@ -114,6 +237,13 @@ public DateHistogramGroupConfig(final String field, } } + /** + * @deprecated Build a DateHistogramGroupConfig using {@link DateHistogramGroupConfig.CalendarInterval} + * or {@link DateHistogramGroupConfig.FixedInterval} instead + * + * @since 7.2.0 + */ + @Deprecated DateHistogramGroupConfig(final StreamInput in) throws IOException { interval = new DateHistogramInterval(in); field = in.readString(); @@ -133,7 +263,7 @@ public void writeTo(final StreamOutput out) throws IOException { public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject(); { - builder.field(INTERVAL, interval.toString()); + builder.field(getIntervalTypeName(), interval.toString()); builder.field(FIELD, field); if (delay != null) { builder.field(DELAY, delay.toString()); @@ -178,6 +308,10 @@ public Rounding createRounding() { return createRounding(interval.toString(), timeZone); } + public String getIntervalTypeName() { + return TYPE_NAME; + } + public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse, ActionRequestValidationException validationException) { @@ -205,7 +339,7 @@ public boolean equals(final Object other) { if (this == other) { return true; } - if (other == null || getClass() != other.getClass()) { + if (other == null || other instanceof DateHistogramGroupConfig == false) { return false; } final DateHistogramGroupConfig that = (DateHistogramGroupConfig) other; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/GroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/GroupConfig.java index b7c69ecda0ee2..1fad03473d3d2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/GroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/GroupConfig.java @@ -76,7 +76,7 @@ public GroupConfig(final DateHistogramGroupConfig dateHistogram, } public GroupConfig(final StreamInput in) throws IOException { - dateHistogram = new DateHistogramGroupConfig(in); + dateHistogram = DateHistogramGroupConfig.fromUnknownTimeUnit(in); histogram = in.readOptionalWriteable(HistogramGroupConfig::new); terms = in.readOptionalWriteable(TermsGroupConfig::new); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsActionResponseTests.java index ba40717959c57..383a4a7d62e70 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsActionResponseTests.java @@ -40,4 +40,6 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); return new NamedWriteableRegistry(searchModule.getNamedWriteables()); } + + } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java index 71491c9227728..6b664777a2d86 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java @@ -70,6 +70,22 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase { + @AwaitsFix(bugUrl = "Tests need to be updated to use calendar/fixed interval explicitly") + public void testIntervalWarnings() { + /* + Placeholder test for visibility. Datafeeds use calendar and fixed intervals through the deprecated + methods. The randomized creation + final superclass tests made it impossible to add warning assertions, + so warnings have been disabled on this test. + + When fixed, `enableWarningsCheck()` should be removed. + */ + } + + @Override + protected boolean enableWarningsCheck() { + return false; + } + @Override protected DatafeedConfig createTestInstance() { return createRandomizedDatafeedConfig(randomAlphaOfLength(10)); @@ -110,7 +126,7 @@ private static DatafeedConfig.Builder createRandomizedDatafeedConfigBuilder(Stri aggHistogramInterval = aggHistogramInterval <= 0 ? 1 : aggHistogramInterval; MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time"); aggs.addAggregator(AggregationBuilders.dateHistogram("buckets") - .interval(aggHistogramInterval).subAggregation(maxTime).field("time")); + .fixedInterval(new DateHistogramInterval(aggHistogramInterval + "ms")).subAggregation(maxTime).field("time")); builder.setParsedAggregations(aggs); } if (randomBoolean()) { @@ -194,7 +210,7 @@ protected DatafeedConfig doParseInstance(XContentParser parser) { " \"buckets\": {\n" + " \"date_histogram\": {\n" + " \"field\": \"time\",\n" + - " \"interval\": \"360s\",\n" + + " \"fixed_interval\": \"360s\",\n" + " \"time_zone\": \"UTC\"\n" + " },\n" + " \"aggregations\": {\n" + @@ -506,6 +522,7 @@ public void testBuild_GivenDateHistogramWithInvalidTimeZone() { assertThat(e.getMessage(), equalTo("ML requires date_histogram.time_zone to be UTC")); } + @AwaitsFix(bugUrl = "Needs ML to look at and fix. Unclear how this should be handled, interval is not an optional param") public void testBuild_GivenDateHistogramWithDefaultInterval() { ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> createDatafeedWithDateHistogram((String) null)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java index 62436172d92a5..571c9e81a9068 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java @@ -54,6 +54,22 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase { + @AwaitsFix(bugUrl = "Tests need to be updated to use calendar/fixed interval explicitly") + public void testIntervalWarnings() { + /* + Placeholder test for visibility. Datafeeds use calendar and fixed intervals through the deprecated + methods. The randomized creation + final superclass tests made it impossible to add warning assertions, + so warnings have been disabled on this test. + + When fixed, `enableWarningsCheck()` should be removed. 
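+ A fixed version would instead assert the "[interval] on [date_histogram] is deprecated, + use [fixed_interval] or [calendar_interval] in the future." deprecation warning that the other tests in this PR expect.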
+ */ + } + + @Override + protected boolean enableWarningsCheck() { + return false; + } + @Override protected DatafeedUpdate createTestInstance() { return createRandomized(DatafeedConfigTests.randomValidDatafeedId()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java index 532468216e5aa..6e11728cdabb6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; @@ -79,14 +80,27 @@ public void testGetHistogramIntervalMillis_GivenDateHistogramWithInvalidTimeZone () -> ExtractorUtils.getHistogramIntervalMillis(dateHistogram)); assertThat(e.getMessage(), equalTo("ML requires date_histogram.time_zone to be UTC")); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] " + + "or [calendar_interval] in the future."); } - public void testGetHistogramIntervalMillis_GivenUtcTimeZones() { + public void testGetHistogramIntervalMillis_GivenUtcTimeZonesDeprecated() { MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time"); ZoneId zone = randomFrom(ZoneOffset.UTC, ZoneId.of("UTC")); DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("bucket").field("time") .interval(300000L).timeZone(zone).subAggregation(maxTime); assertThat(ExtractorUtils.getHistogramIntervalMillis(dateHistogram), is(300_000L)); + + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] " + + "or [calendar_interval] in the future."); + } + + public void testGetHistogramIntervalMillis_GivenUtcTimeZones() { + MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time"); + ZoneId zone = randomFrom(ZoneOffset.UTC, ZoneId.of("UTC")); + DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("bucket").field("time") + .fixedInterval(new DateHistogramInterval("300000ms")).timeZone(zone).subAggregation(maxTime); + assertThat(ExtractorUtils.getHistogramIntervalMillis(dateHistogram), is(300_000L)); } public void testIsHistogram() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java index 605ea6e901a90..3535cb1ed55a5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java @@ -7,6 +7,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import 
org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; @@ -69,12 +70,33 @@ public static GroupConfig randomGroupConfig(final Random random) { public static DateHistogramGroupConfig randomDateHistogramGroupConfig(final Random random) { final String field = randomField(random); - final DateHistogramInterval interval = randomInterval(); final DateHistogramInterval delay = random.nextBoolean() ? randomInterval() : null; - String timezone = random.nextBoolean() ? randomZone().getId() : null; - return new DateHistogramGroupConfig(field, interval, delay, timezone); + final String timezone = random.nextBoolean() ? randomZone().getId() : null; + if (random.nextBoolean()) { + return new DateHistogramGroupConfig.FixedInterval(field, randomInterval(), delay, timezone); + } else { + List<String> units = new ArrayList<>(DateHistogramAggregationBuilder.DATE_FIELD_UNITS.keySet()); + Collections.shuffle(units, random); + return new DateHistogramGroupConfig.CalendarInterval(field, new DateHistogramInterval(units.get(0)), delay, timezone); + } } + public static DateHistogramGroupConfig randomLegacyDateHistogramGroupConfig(final Random random) { + final String field = randomField(random); + final DateHistogramInterval delay = random.nextBoolean() ? randomInterval() : null; + final String timezone = random.nextBoolean() ? randomZone().getId() : null; + if (random.nextBoolean()) { + return new DateHistogramGroupConfig(field, randomInterval(), delay, timezone); + } else { + List<String> units = new ArrayList<>(DateHistogramAggregationBuilder.DATE_FIELD_UNITS.keySet()); + Collections.shuffle(units, random); + return new DateHistogramGroupConfig(field, new DateHistogramInterval(units.get(0)), delay, timezone); + } + } + + public static List<String> getFields() { return IntStream.range(0, ESTestCase.randomIntBetween(1, 10)) .mapToObj(n -> ESTestCase.randomAlphaOfLengthBetween(5, 10)) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java index 95df682ff5e14..65844e9e1ca95 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java @@ -27,6 +27,12 @@ import static org.mockito.Mockito.when; public class DateHistogramGroupConfigSerializingTests extends AbstractSerializingTestCase<DateHistogramGroupConfig> { + + private enum DateHistoType { + LEGACY, FIXED, CALENDAR + } + private static DateHistoType type; + @Override protected DateHistogramGroupConfig doParseInstance(final XContentParser parser) throws IOException { return DateHistogramGroupConfig.fromXContent(parser); @@ -34,19 +40,33 @@ protected DateHistogramGroupConfig doParseInstance(final XContentParser parser) @Override protected Writeable.Reader<DateHistogramGroupConfig> instanceReader() { + if (type.equals(DateHistoType.FIXED)) { + return DateHistogramGroupConfig.FixedInterval::new; + } else if (type.equals(DateHistoType.CALENDAR)) { + return DateHistogramGroupConfig.CalendarInterval::new; + } return DateHistogramGroupConfig::new; } @Override
protected DateHistogramGroupConfig createTestInstance() { - return randomDateHistogramGroupConfig(random()); + DateHistogramGroupConfig config = randomDateHistogramGroupConfig(random()); + if (config.getClass().equals(DateHistogramGroupConfig.FixedInterval.class)) { + type = DateHistoType.FIXED; + } else if (config.getClass().equals(DateHistogramGroupConfig.CalendarInterval.class)) { + type = DateHistoType.CALENDAR; + } else { + type = DateHistoType.LEGACY; + } + return config; } public void testValidateNoMapping() { ActionRequestValidationException e = new ActionRequestValidationException(); Map> responseMap = new HashMap<>(); - DateHistogramGroupConfig config = new DateHistogramGroupConfig("my_field", new DateHistogramInterval("1d"), null, null); + DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field", + new DateHistogramInterval("1d"), null, null); config.validateMappings(responseMap, e); assertThat(e.validationErrors().get(0), equalTo("Could not find a [date] field with name [my_field] in any of the " + "indices matching the index pattern.")); @@ -60,7 +80,8 @@ public void testValidateNomatchingField() { FieldCapabilities fieldCaps = mock(FieldCapabilities.class); responseMap.put("some_other_field", Collections.singletonMap("date", fieldCaps)); - DateHistogramGroupConfig config = new DateHistogramGroupConfig("my_field", new DateHistogramInterval("1d"), null, null); + DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field", + new DateHistogramInterval("1d"), null, null); config.validateMappings(responseMap, e); assertThat(e.validationErrors().get(0), equalTo("Could not find a [date] field with name [my_field] in any of the " + "indices matching the index pattern.")); @@ -74,7 +95,8 @@ public void testValidateFieldWrongType() { FieldCapabilities fieldCaps = mock(FieldCapabilities.class); responseMap.put("my_field", Collections.singletonMap("keyword", fieldCaps)); - DateHistogramGroupConfig config = new DateHistogramGroupConfig("my_field", new DateHistogramInterval("1d"), null, null); + DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field", + new DateHistogramInterval("1d"), null, null); config.validateMappings(responseMap, e); assertThat(e.validationErrors().get(0), equalTo("The field referenced by a date_histo group must be a [date] type across all " + "indices in the index pattern. Found: [keyword] for field [my_field]")); @@ -91,7 +113,8 @@ public void testValidateFieldMixtureTypes() { types.put("keyword", fieldCaps); responseMap.put("my_field", types); - DateHistogramGroupConfig config = new DateHistogramGroupConfig("my_field", new DateHistogramInterval("1d"), null, null); + DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field", + new DateHistogramInterval("1d"), null, null); config.validateMappings(responseMap, e); assertThat(e.validationErrors().get(0), equalTo("The field referenced by a date_histo group must be a [date] type across all " + "indices in the index pattern. 
Found: [date, keyword] for field [my_field]")); @@ -106,7 +129,8 @@ public void testValidateFieldMatchingNotAggregatable() { when(fieldCaps.isAggregatable()).thenReturn(false); responseMap.put("my_field", Collections.singletonMap("date", fieldCaps)); - DateHistogramGroupConfig config =new DateHistogramGroupConfig("my_field", new DateHistogramInterval("1d"), null, null); + DateHistogramGroupConfig config =new DateHistogramGroupConfig.CalendarInterval("my_field", + new DateHistogramInterval("1d"), null, null); config.validateMappings(responseMap, e); assertThat(e.validationErrors().get(0), equalTo("The field [my_field] must be aggregatable across all indices, but is not.")); } @@ -120,7 +144,8 @@ public void testValidateMatchingField() { when(fieldCaps.isAggregatable()).thenReturn(true); responseMap.put("my_field", Collections.singletonMap("date", fieldCaps)); - DateHistogramGroupConfig config = new DateHistogramGroupConfig("my_field", new DateHistogramInterval("1d"), null, null); + DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field", + new DateHistogramInterval("1d"), null, null); config.validateMappings(responseMap, e); assertThat(e.validationErrors().size(), equalTo(0)); } @@ -134,7 +159,8 @@ public void testValidateWeek() { when(fieldCaps.isAggregatable()).thenReturn(true); responseMap.put("my_field", Collections.singletonMap("date", fieldCaps)); - DateHistogramGroupConfig config = new DateHistogramGroupConfig("my_field", new DateHistogramInterval("1w"), null, null); + DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field", + new DateHistogramInterval("1w"), null, null); config.validateMappings(responseMap, e); assertThat(e.validationErrors().size(), equalTo(0)); } @@ -145,7 +171,7 @@ public void testValidateWeek() { */ public void testBwcSerialization() throws IOException { for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) { - final DateHistogramGroupConfig reference = ConfigTestHelpers.randomDateHistogramGroupConfig(random()); + final DateHistogramGroupConfig reference = ConfigTestHelpers.randomLegacyDateHistogramGroupConfig(random()); final BytesStreamOutput out = new BytesStreamOutput(); reference.writeTo(out); @@ -179,4 +205,44 @@ public void testBwcSerialization() throws IOException { assertEqualInstances(new DateHistogramGroupConfig(field, interval, delay, timezone.getId()), deserialized); } } + + /** + * Tests that old DateHistogramGroupConfigs can be serialized/deserialized + * into the specialized Fixed/Calendar versions + */ + public void testLegacyConfigBWC() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) { + // Serialize the old format + final DateHistogramGroupConfig reference = ConfigTestHelpers.randomLegacyDateHistogramGroupConfig(random()); + + final BytesStreamOutput out = new BytesStreamOutput(); + reference.writeTo(out); + final StreamInput in = out.bytes().streamInput(); + + // Deserialize the new format + DateHistogramGroupConfig test = DateHistogramGroupConfig.fromUnknownTimeUnit(in); + + assertThat(reference.getInterval(), equalTo(test.getInterval())); + assertThat(reference.getField(), equalTo(test.getField())); + assertThat(reference.getTimeZone(), equalTo(test.getTimeZone())); + assertThat(reference.getDelay(), equalTo(test.getDelay())); + } + + for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) { + // Serialize the new format + final DateHistogramGroupConfig reference = ConfigTestHelpers.randomDateHistogramGroupConfig(random()); + + final 
BytesStreamOutput out = new BytesStreamOutput(); + reference.writeTo(out); + final StreamInput in = out.bytes().streamInput(); + + // Deserialize the old format + DateHistogramGroupConfig test = new DateHistogramGroupConfig(in); + + assertThat(reference.getInterval(), equalTo(test.getInterval())); + assertThat(reference.getField(), equalTo(test.getField())); + assertThat(reference.getTimeZone(), equalTo(test.getTimeZone())); + assertThat(reference.getDelay(), equalTo(test.getDelay())); + } + } } diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index 75e179e5dee4c..71f0dc248b8b6 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -226,10 +226,14 @@ public void testDateHistogramPivot() throws Exception { + "}"; createDataframeTransformRequest.setJsonEntity(config); + createDataframeTransformRequest.setOptions(expectWarnings("[interval] on [date_histogram] is deprecated, " + + "use [fixed_interval] or [calendar_interval] in the future.")); + Map createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); - startAndWaitForTransform(transformId, dataFrameIndex, BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS); + startAndWaitForTransform(transformId, dataFrameIndex, BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS, + "[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); assertTrue(indexExists(dataFrameIndex)); Map indexStats = getAsMap(dataFrameIndex + "/_stats"); @@ -257,6 +261,9 @@ public void testPreviewTransform() throws Exception { + " } } } }" + "}"; createPreviewRequest.setJsonEntity(config); + createPreviewRequest.setOptions(expectWarnings("[interval] on [date_histogram] is deprecated, " + + "use [fixed_interval] or [calendar_interval] in the future.")); + Map previewDataframeResponse = entityAsMap(client().performRequest(createPreviewRequest)); List> preview = (List>)previewDataframeResponse.get("preview"); // preview is limited to 100 @@ -298,10 +305,13 @@ public void testPivotWithMaxOnDateField() throws Exception { + "}"; createDataframeTransformRequest.setJsonEntity(config); + createDataframeTransformRequest.setOptions(expectWarnings("[interval] on [date_histogram] is deprecated, " + + "use [fixed_interval] or [calendar_interval] in the future.")); Map createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); - startAndWaitForTransform(transformId, dataFrameIndex, BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS); + startAndWaitForTransform(transformId, dataFrameIndex, BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS, + "[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); assertTrue(indexExists(dataFrameIndex)); // we expect 21 documents as there shall be 21 days worth of docs diff --git 
a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java index 89047219f401d..85c0ac44a69af 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java @@ -176,10 +176,13 @@ protected void startDataframeTransform(String transformId, boolean force) throws startDataframeTransform(transformId, force, null); } - protected void startDataframeTransform(String transformId, boolean force, String authHeader) throws IOException { + protected void startDataframeTransform(String transformId, boolean force, String authHeader, String... warnings) throws IOException { // start the transform final Request startTransformRequest = createRequestWithAuth("POST", DATAFRAME_ENDPOINT + transformId + "/_start", authHeader); startTransformRequest.addParameter(DataFrameField.FORCE.getPreferredName(), Boolean.toString(force)); + if (warnings.length > 0) { + startTransformRequest.setOptions(expectWarnings(warnings)); + } Map startTransformResponse = entityAsMap(client().performRequest(startTransformRequest)); assertThat(startTransformResponse.get("started"), equalTo(Boolean.TRUE)); } @@ -198,8 +201,13 @@ protected void startAndWaitForTransform(String transformId, String dataFrameInde } protected void startAndWaitForTransform(String transformId, String dataFrameIndex, String authHeader) throws Exception { + startAndWaitForTransform(transformId, dataFrameIndex, authHeader, new String[0]); + } + + protected void startAndWaitForTransform(String transformId, String dataFrameIndex, + String authHeader, String... warnings) throws Exception { // start the transform - startDataframeTransform(transformId, false, authHeader); + startDataframeTransform(transformId, false, authHeader, warnings); assertTrue(indexExists(dataFrameIndex)); // wait until the dataframe has been created and all data is available waitForDataFrameCheckpoint(transformId); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java index 172868833f36a..4c434cdbee7fd 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java @@ -83,6 +83,16 @@ protected NamedXContentRegistry xContentRegistry() { return namedXContentRegistry; } + + /* + Had to disable warnings because tests get random date histo configs, and changing to + new interval format was non-trivial. 
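+ Presumably the random pivot configs would need to emit fixed_interval/calendar_interval + date_histogram groups before the warning assertions can be re-enabled.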
Best for ML team to fix + */ + @Override + protected boolean enableWarningsCheck() { + return false; + } + public void testValidateExistingIndex() throws Exception { SourceConfig source = new SourceConfig(new String[]{"existing_source_index"}, QueryConfig.matchAll()); Pivot pivot = new Pivot(getValidPivotConfig()); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java index 8c5f5cf1e39f9..426b58f686419 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java @@ -391,7 +391,7 @@ public void testInsufficientSearchPrivilegesOnPutWithRollup() throws Exception { + " \"groups\" : {\n" + " \"date_histogram\": {\n" + " \"field\": \"time stamp\",\n" - + " \"interval\": \"2m\",\n" + + " \"fixed_interval\": \"2m\",\n" + " \"delay\": \"7d\"\n" + " },\n" + " \"terms\": {\n" @@ -412,7 +412,7 @@ public void testInsufficientSearchPrivilegesOnPutWithRollup() throws Exception { client().performRequest(createRollupRequest); String datafeedId = "datafeed-" + jobId; - String aggregations = "{\"buckets\":{\"date_histogram\":{\"field\":\"time stamp\",\"interval\":3600000}," + String aggregations = "{\"buckets\":{\"date_histogram\":{\"field\":\"time stamp\",\"fixed_interval\":\"3600000ms\"}," + "\"aggregations\":{" + "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}}," + "\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}"; @@ -524,7 +524,7 @@ public void testLookbackOnlyGivenAggregationsWithDateHistogram() throws Exceptio client().performRequest(createJobRequest); String datafeedId = "datafeed-" + jobId; - String aggregations = "{\"time stamp\":{\"date_histogram\":{\"field\":\"time stamp\",\"interval\":\"1h\"}," + String aggregations = "{\"time stamp\":{\"date_histogram\":{\"field\":\"time stamp\",\"calendar_interval\":\"1h\"}," + "\"aggregations\":{" + "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}}," + "\"airline\":{\"terms\":{\"field\":\"airline\",\"size\":10}," @@ -564,7 +564,7 @@ public void testLookbackUsingDerivativeAggWithLargerHistogramBucketThanDataRate( String datafeedId = "datafeed-" + jobId; String aggregations = "{\"hostname\": {\"terms\" : {\"field\": \"host.keyword\", \"size\":10}," - + "\"aggs\": {\"buckets\": {\"date_histogram\":{\"field\":\"timestamp\",\"interval\":\"60s\"}," + + "\"aggs\": {\"buckets\": {\"date_histogram\":{\"field\":\"timestamp\",\"fixed_interval\":\"60s\"}," + "\"aggs\": {\"timestamp\":{\"max\":{\"field\":\"timestamp\"}}," + "\"bytes-delta\":{\"derivative\":{\"buckets_path\":\"avg_bytes_out\"}}," + "\"avg_bytes_out\":{\"avg\":{\"field\":\"network_bytes_out\"}} }}}}}"; @@ -610,7 +610,7 @@ public void testLookbackUsingDerivativeAggWithSmallerHistogramBucketThanDataRate String datafeedId = "datafeed-" + jobId; String aggregations = "{\"hostname\": {\"terms\" : {\"field\": \"host.keyword\", \"size\":10}," - + "\"aggs\": {\"buckets\": {\"date_histogram\":{\"field\":\"timestamp\",\"interval\":\"5s\"}," + + "\"aggs\": {\"buckets\": {\"date_histogram\":{\"field\":\"timestamp\",\"fixed_interval\":\"5s\"}," + "\"aggs\": {\"timestamp\":{\"max\":{\"field\":\"timestamp\"}}," + "\"bytes-delta\":{\"derivative\":{\"buckets_path\":\"avg_bytes_out\"}}," + 
"\"avg_bytes_out\":{\"avg\":{\"field\":\"network_bytes_out\"}} }}}}}"; @@ -652,7 +652,7 @@ public void testLookbackWithoutPermissions() throws Exception { String datafeedId = "datafeed-" + jobId; String aggregations = "{\"hostname\": {\"terms\" : {\"field\": \"host.keyword\", \"size\":10}," - + "\"aggs\": {\"buckets\": {\"date_histogram\":{\"field\":\"timestamp\",\"interval\":\"5s\"}," + + "\"aggs\": {\"buckets\": {\"date_histogram\":{\"field\":\"timestamp\",\"fixed_interval\":\"5s\"}," + "\"aggs\": {\"timestamp\":{\"max\":{\"field\":\"timestamp\"}}," + "\"bytes-delta\":{\"derivative\":{\"buckets_path\":\"avg_bytes_out\"}}," + "\"avg_bytes_out\":{\"avg\":{\"field\":\"network_bytes_out\"}} }}}}}"; @@ -706,7 +706,7 @@ public void testLookbackWithPipelineBucketAgg() throws Exception { client().performRequest(createJobRequest); String datafeedId = "datafeed-" + jobId; - String aggregations = "{\"buckets\":{\"date_histogram\":{\"field\":\"time stamp\",\"interval\":\"15m\"}," + String aggregations = "{\"buckets\":{\"date_histogram\":{\"field\":\"time stamp\",\"fixed_interval\":\"15m\"}," + "\"aggregations\":{" + "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}}," + "\"airlines\":{\"terms\":{\"field\":\"airline.keyword\",\"size\":10}}," @@ -759,7 +759,7 @@ public void testLookbackOnlyGivenAggregationsWithHistogramAndRollupIndex() throw + " \"groups\" : {\n" + " \"date_histogram\": {\n" + " \"field\": \"time stamp\",\n" - + " \"interval\": \"2m\",\n" + + " \"fixed_interval\": \"2m\",\n" + " \"delay\": \"7d\"\n" + " },\n" + " \"terms\": {\n" @@ -797,7 +797,7 @@ public void testLookbackOnlyGivenAggregationsWithHistogramAndRollupIndex() throw client().performRequest(refreshRollupIndex); String datafeedId = "datafeed-" + jobId; - String aggregations = "{\"buckets\":{\"date_histogram\":{\"field\":\"time stamp\",\"interval\":3600000}," + String aggregations = "{\"buckets\":{\"date_histogram\":{\"field\":\"time stamp\",\"fixed_interval\":\"3600000ms\"}," + "\"aggregations\":{" + "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}}," + "\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}"; @@ -844,7 +844,7 @@ public void testLookbackWithoutPermissionsAndRollup() throws Exception { + " \"groups\" : {\n" + " \"date_histogram\": {\n" + " \"field\": \"time stamp\",\n" - + " \"interval\": \"2m\",\n" + + " \"fixed_interval\": \"2m\",\n" + " \"delay\": \"7d\"\n" + " },\n" + " \"terms\": {\n" @@ -865,7 +865,7 @@ public void testLookbackWithoutPermissionsAndRollup() throws Exception { client().performRequest(createRollupRequest); String datafeedId = "datafeed-" + jobId; - String aggregations = "{\"buckets\":{\"date_histogram\":{\"field\":\"time stamp\",\"interval\":3600000}," + String aggregations = "{\"buckets\":{\"date_histogram\":{\"field\":\"time stamp\",\"fixed_interval\":\"3600000ms\"}," + "\"aggregations\":{" + "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}}," + "\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}"; @@ -914,7 +914,7 @@ public void testLookbackWithSingleBucketAgg() throws Exception { client().performRequest(createJobRequest); String datafeedId = "datafeed-" + jobId; - String aggregations = "{\"time stamp\":{\"date_histogram\":{\"field\":\"time stamp\",\"interval\":\"1h\"}," + String aggregations = "{\"time stamp\":{\"date_histogram\":{\"field\":\"time stamp\",\"calendar_interval\":\"1h\"}," + "\"aggregations\":{" + "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}}," + "\"airlineFilter\":{\"filter\":{\"term\": {\"airline\":\"AAA\"}}," diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java index 744076320b6d0..e3af3b7ac64ce 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.Min; @@ -278,7 +279,7 @@ private static AggregationBuilder buildAggregations(long maxBucketSpanMillis, in .field(Result.IS_INTERIM.getPreferredName()); return AggregationBuilders.dateHistogram(Result.TIMESTAMP.getPreferredName()) .field(Result.TIMESTAMP.getPreferredName()) - .interval(maxBucketSpanMillis) + .fixedInterval(new DateHistogramInterval(maxBucketSpanMillis + "ms")) .subAggregation(jobsAgg) .subAggregation(interimAgg); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java index 5cafb69bf3a51..d2a933ffa466f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.action.util.PageParams; @@ -110,7 +111,8 @@ private List checkBucketEvents(long start, long end) { private Map checkCurrentBucketEventCount(long start, long end) { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() .size(0) - .aggregation(new DateHistogramAggregationBuilder(DATE_BUCKETS).interval(bucketSpan).field(timeField)) + .aggregation(new DateHistogramAggregationBuilder(DATE_BUCKETS) + .fixedInterval(new DateHistogramInterval(bucketSpan + "ms")).field(timeField)) .query(ExtractorUtils.wrapInTimeRangeQuery(datafeedQuery, timeField, start, end)); SearchRequest searchRequest = new SearchRequest(datafeedIndices).source(searchSourceBuilder); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java index 8264d3e15fd59..7a66ff49d62af 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java @@ -207,7 +207,16 @@ private String getInterval() { if (datehistogramAgg == null) { return null; } - return (String)datehistogramAgg.get(DateHistogramGroupConfig.INTERVAL); + if (datehistogramAgg.get(DateHistogramGroupConfig.INTERVAL) != null) { + return (String)datehistogramAgg.get(DateHistogramGroupConfig.INTERVAL); + } + if (datehistogramAgg.get(DateHistogramGroupConfig.CALENDAR_INTERVAL) != null) { + return (String)datehistogramAgg.get(DateHistogramGroupConfig.CALENDAR_INTERVAL); + } + if (datehistogramAgg.get(DateHistogramGroupConfig.FIXED_INTERVAL) != null) { + return (String)datehistogramAgg.get(DateHistogramGroupConfig.FIXED_INTERVAL); + } + return null; } private String getTimezone() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java index dee28e71a7bf7..ed24af15962a2 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java @@ -214,7 +214,10 @@ public void testCreateDataExtractorFactoryGivenRollupAndValidAggregation() { datafeedConfig.setParsedAggregations(AggregatorFactories.builder().addAggregator( AggregationBuilders.dateHistogram("time").interval(600_000).subAggregation(maxTime).subAggregation(myTerm).field("time"))); ActionListener listener = ActionListener.wrap( - dataExtractorFactory -> assertThat(dataExtractorFactory, instanceOf(RollupDataExtractorFactory.class)), + dataExtractorFactory -> { + assertThat(dataExtractorFactory, instanceOf(RollupDataExtractorFactory.class)); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); + }, e -> fail() ); DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); @@ -234,7 +237,10 @@ public void testCreateDataExtractorFactoryGivenRollupAndValidAggregationAndAutoC datafeedConfig.setParsedAggregations(AggregatorFactories.builder().addAggregator( AggregationBuilders.dateHistogram("time").interval(600_000).subAggregation(maxTime).subAggregation(myTerm).field("time"))); ActionListener listener = ActionListener.wrap( - dataExtractorFactory -> assertThat(dataExtractorFactory, instanceOf(ChunkedDataExtractorFactory.class)), + dataExtractorFactory -> { + assertThat(dataExtractorFactory, instanceOf(ChunkedDataExtractorFactory.class)); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); + }, e -> fail() ); DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); @@ -280,6 +286,7 @@ public void testCreateDataExtractorFactoryGivenRollupWithBadInterval() { containsString("Rollup capabilities do not have a [date_histogram] aggregation with an interval " + "that is a multiple of the datafeed's interval.")); assertThat(e, instanceOf(IllegalArgumentException.class)); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } ); DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), 
listener); @@ -304,6 +311,7 @@ public void testCreateDataExtractorFactoryGivenRollupMissingTerms() { assertThat(e.getMessage(), containsString("Rollup capabilities do not support all the datafeed aggregations at the desired interval.")); assertThat(e, instanceOf(IllegalArgumentException.class)); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } ); DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); @@ -328,6 +336,7 @@ public void testCreateDataExtractorFactoryGivenRollupMissingMetric() { assertThat(e.getMessage(), containsString("Rollup capabilities do not support all the datafeed aggregations at the desired interval.")); assertThat(e, instanceOf(IllegalArgumentException.class)); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } ); DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), xContentRegistry(), listener); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java index 95b5069edcf88..8c2a0672678d0 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java @@ -25,6 +25,10 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig.CALENDAR_INTERVAL; +import static org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig.FIXED_INTERVAL; +import static org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig.INTERVAL; + /** * This class contains utilities to identify which jobs are the "best" for a given aggregation tree. * It allows the caller to pass in a set of possible rollup job capabilities and get in return @@ -87,8 +91,7 @@ private static void doFindBestJobs(AggregationBuilder source, List<RollupJobCaps> jobCaps, - private static void checkDateHisto(DateHistogramAggregationBuilder source, List<RollupJobCaps> jobCaps, - Set<RollupJobCaps> bestCaps) { + private static void checkDateHisto(DateHistogramAggregationBuilder source, List<RollupJobCaps> jobCaps, Set<RollupJobCaps> bestCaps) { ArrayList<RollupJobCaps> localCaps = new ArrayList<>(); for (RollupJobCaps cap : jobCaps) { RollupJobCaps.RollupFieldCaps fieldCaps = cap.getFieldCaps().get(source.field()); @@ -106,21 +109,102 @@ private static void checkDateHisto(DateHistogramAggregationBuilder source, List< if (thisTimezone.getRules().equals(sourceTimeZone.getRules()) == false) { continue; } - if (source.dateHistogramInterval() != null) { - // Check if both are calendar and validate if they are. - // If not, check if both are fixed and validate - if (validateCalendarInterval(source.dateHistogramInterval(), interval)) { + + /* + This is convoluted, but new + legacy intervals make for a big pattern match. + We have to match up date_histo [interval, fixed_interval, calendar_interval] with + rollup config [interval, fixed_interval, calendar_interval]. + + To keep rightward drift to a minimum we break out of the loop if a successful match is found. + */ + + DateHistogramInterval configCalendarInterval = agg.get(CALENDAR_INTERVAL) != null + ? new DateHistogramInterval((String) agg.get(CALENDAR_INTERVAL)) : null; + DateHistogramInterval configFixedInterval = agg.get(FIXED_INTERVAL) != null + ?
new DateHistogramInterval((String) agg.get(FIXED_INTERVAL)) : null; + DateHistogramInterval configLegacyInterval = agg.get(INTERVAL) != null + ? new DateHistogramInterval((String) agg.get(INTERVAL)) : null; + + // If histo used calendar_interval explicitly + if (source.getCalendarInterval() != null) { + DateHistogramInterval requestInterval = source.getCalendarInterval(); + + // Try to use explicit calendar_interval on config if it exists + if (validateCalendarInterval(requestInterval, configCalendarInterval)) { localCaps.add(cap); - } else if (validateFixedInterval(source.dateHistogramInterval(), interval)) { + break; + } + + // Otherwise fall back to old style where we prefer calendar over fixed (e.g. `1h` == calendar) + if (validateCalendarInterval(requestInterval, configLegacyInterval)) { localCaps.add(cap); + break; } - } else { - // check if config is fixed and validate if it is - if (validateFixedInterval(source.interval(), interval)) { + + // Note that this ignores FIXED_INTERVAL on purpose, it would not be compatible + + } else if (source.getFixedInterval() != null) { + // If histo used fixed_interval explicitly + + DateHistogramInterval requestInterval = source.getFixedInterval(); + + // Try to use explicit fixed_interval on config if it exists + if (validateFixedInterval(requestInterval, configFixedInterval)) { localCaps.add(cap); + break; } + + // Otherwise fall back to old style + if (validateFixedInterval(requestInterval, configLegacyInterval)) { + localCaps.add(cap); + break; + } + + // Note that this ignores CALENDAR_INTERVAL on purpose, it would not be compatible + + } else if (source.dateHistogramInterval() != null) { + // The histo used a deprecated interval method, so meaning is ambiguous. + // Use legacy method of preferring calendar over fixed + final DateHistogramInterval requestInterval = source.dateHistogramInterval(); + + // Try to use explicit calendar_interval on config if it exists + // Both must be calendar intervals + if (validateCalendarInterval(requestInterval, configCalendarInterval)) { + localCaps.add(cap); + break; + } + + // Otherwise fall back to old style where we prefer calendar over fixed (e.g. `1h` == calendar) + // Need to verify that the config interval is in fact calendar here + if (isCalendarInterval(configLegacyInterval) + && validateCalendarInterval(requestInterval, configLegacyInterval)) { + + localCaps.add(cap); + break; + } + + // The histo's interval couldn't be parsed as a calendar, so it is assumed fixed.
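+ // (e.g. a deprecated interval of `7d` or `90m` is not a calendar unit, so only fixed matching can apply from here)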
+ // Try to use explicit fixed_interval on config if it exists + if (validateFixedInterval(requestInterval, configFixedInterval)) { + localCaps.add(cap); + break; + } + + } else if (source.interval() != 0) { + // Otherwise fall back to old style interval millis + // Need to verify that the config interval is not calendar here + if (isCalendarInterval(configLegacyInterval) == false + && validateFixedInterval(new DateHistogramInterval(source.interval() + "ms"), configLegacyInterval)) { + + localCaps.add(cap); + break; + } + } else { + // This _should not_ happen, but if it miraculously does we need to just quit + throw new IllegalArgumentException("An interval of some variety must be configured on " + + "the date_histogram aggregation."); } - // not a candidate if we get here + // If we get here nothing matched, and we can break out break; } } @@ -141,32 +225,50 @@ private static void checkDateHisto(DateHistogramAggregationBuilder source, List< } } + static String retrieveInterval(Map<String, Object> agg) { + String interval = (String) agg.get(RollupField.INTERVAL); + if (interval == null) { + interval = (String) agg.get(CALENDAR_INTERVAL); + } + if (interval == null) { + interval = (String) agg.get(FIXED_INTERVAL); + } + if (interval == null) { + throw new IllegalStateException("Could not find interval in agg cap: " + agg.toString()); + } + return interval; + } + private static boolean isCalendarInterval(DateHistogramInterval interval) { - return DateHistogramAggregationBuilder.DATE_FIELD_UNITS.containsKey(interval.toString()); + return interval != null && DateHistogramAggregationBuilder.DATE_FIELD_UNITS.containsKey(interval.toString()); } static boolean validateCalendarInterval(DateHistogramInterval requestInterval, DateHistogramInterval configInterval) { - // Both must be calendar intervals - if (isCalendarInterval(requestInterval) == false || isCalendarInterval(configInterval) == false) { + if (requestInterval == null || configInterval == null) { return false; } // The request must be gte the config.
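+ // (e.g. a request at `1M` can be satisfied by a config of `1d` or `1h`, but not by `1q` or `1y`)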
The CALENDAR_ORDERING map values are integers representing // relative orders between the calendar units Rounding.DateTimeUnit requestUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(requestInterval.toString()); - long requestOrder = requestUnit.getField().getBaseUnit().getDuration().toMillis(); + if (requestUnit == null) { + return false; + } Rounding.DateTimeUnit configUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(configInterval.toString()); + if (configUnit == null) { + return false; + } + + long requestOrder = requestUnit.getField().getBaseUnit().getDuration().toMillis(); long configOrder = configUnit.getField().getBaseUnit().getDuration().toMillis(); // All calendar units are multiples naturally, so we just care about gte return requestOrder >= configOrder; } - static boolean validateFixedInterval(DateHistogramInterval requestInterval, - DateHistogramInterval configInterval) { - // Neither can be calendar intervals - if (isCalendarInterval(requestInterval) || isCalendarInterval(configInterval)) { + static boolean validateFixedInterval(DateHistogramInterval requestInterval, DateHistogramInterval configInterval) { + if (requestInterval == null || configInterval == null) { return false; } @@ -180,18 +282,6 @@ static boolean validateFixedInterval(DateHistogramInterval requestInterval, return requestIntervalMillis >= configIntervalMillis && requestIntervalMillis % configIntervalMillis == 0; } - static boolean validateFixedInterval(long requestInterval, DateHistogramInterval configInterval) { - // config must not be a calendar interval - if (isCalendarInterval(configInterval)) { - return false; - } - long configIntervalMillis = TimeValue.parseTimeValue(configInterval.toString(), - "date_histo.config.interval").getMillis(); - - // Must be a multiple and gte the config - return requestInterval >= configIntervalMillis && requestInterval % configIntervalMillis == 0; - } - /** * Find the set of histo's with the largest interval */ @@ -202,7 +292,7 @@ private static void checkHisto(HistogramAggregationBuilder source, List agg : fieldCaps.getAggs()) { if (agg.get(RollupField.AGG).equals(HistogramAggregationBuilder.NAME)) { - Long interval = (long)agg.get(RollupField.INTERVAL); + long interval = (long) agg.get(RollupField.INTERVAL); // query interval must be gte the configured interval, and a whole multiple if (interval <= source.interval() && source.interval() % interval == 0) { localCaps.add(cap); @@ -324,7 +414,7 @@ private static Comparator getComparator() { for (RollupJobCaps.RollupFieldCaps fieldCaps : o1.getFieldCaps().values()) { for (Map agg : fieldCaps.getAggs()) { if (agg.get(RollupField.AGG).equals(DateHistogramAggregationBuilder.NAME)) { - thisTime = getMillisFixedOrCalendar((String) agg.get(RollupField.INTERVAL)); + thisTime = new DateHistogramInterval(retrieveInterval(agg)).estimateMillis(); } else if (agg.get(RollupField.AGG).equals(HistogramAggregationBuilder.NAME)) { thisHistoWeights += (long) agg.get(RollupField.INTERVAL); counter += 1; @@ -340,7 +430,7 @@ private static Comparator getComparator() { for (RollupJobCaps.RollupFieldCaps fieldCaps : o2.getFieldCaps().values()) { for (Map agg : fieldCaps.getAggs()) { if (agg.get(RollupField.AGG).equals(DateHistogramAggregationBuilder.NAME)) { - thatTime = getMillisFixedOrCalendar((String) agg.get(RollupField.INTERVAL)); + thatTime = new DateHistogramInterval(retrieveInterval(agg)).estimateMillis(); } else if (agg.get(RollupField.AGG).equals(HistogramAggregationBuilder.NAME)) { thatHistoWeights += (long) 
agg.get(RollupField.INTERVAL); counter += 1; @@ -385,14 +475,4 @@ private static Comparator getComparator() { // coverage }; } - - static long getMillisFixedOrCalendar(String value) { - DateHistogramInterval interval = new DateHistogramInterval(value); - if (isCalendarInterval(interval)) { - Rounding.DateTimeUnit intervalUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString()); - return intervalUnit.getField().getBaseUnit().getDuration().toMillis(); - } else { - return TimeValue.parseTimeValue(value, "date_histo.comparator.interval").getMillis(); - } - } } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java index 7cf8f8d1293e1..b610dca45086e 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; @@ -60,7 +61,7 @@ public class RollupRequestTranslator { * "the_histo": { * "date_histogram" : { * "field" : "ts", - * "interval" : "1d" + * "calendar_interval" : "1d" * }, * "aggs": { * "the_max": { @@ -93,7 +94,7 @@ public class RollupRequestTranslator { * "the_histo" : { * "date_histogram" : { * "field" : "ts.date_histogram.timestamp", - * "interval" : "1d" + * "calendar_interval" : "1d" * }, * "aggregations" : { * "the_histo._count" : { @@ -150,7 +151,7 @@ public static List translateAggregation(AggregationBuilder s * "the_histo": { * "date_histogram" : { * "field" : "ts", - * "interval" : "day" + * "calendar_interval" : "day" * } * } * } @@ -199,10 +200,16 @@ private static List translateDateHistogram(DateHistogramAggr DateHistogramAggregationBuilder rolledDateHisto = new DateHistogramAggregationBuilder(source.getName()); - if (source.dateHistogramInterval() != null) { + if (source.getCalendarInterval() != null) { + rolledDateHisto.calendarInterval(source.getCalendarInterval()); + } else if (source.getFixedInterval() != null) { + rolledDateHisto.fixedInterval(source.getFixedInterval()); + } else if (source.dateHistogramInterval() != null) { + // We have to fall back to deprecated interval because we're not sure if this is fixed or cal rolledDateHisto.dateHistogramInterval(source.dateHistogramInterval()); } else { - rolledDateHisto.interval(source.interval()); + // if interval() was used we know it is fixed and can upgrade + rolledDateHisto.fixedInterval(new DateHistogramInterval(source.interval() + "ms")); } ZoneId timeZone = source.timeZone() == null ? 
DateHistogramGroupConfig.DEFAULT_ZONEID_TIMEZONE : source.timeZone(); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java index b60a37d3fa4e5..5a16be1456ab9 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java @@ -237,7 +237,13 @@ public static List> createValueSourceBuilders(fi final String dateHistogramField = dateHistogram.getField(); final String dateHistogramName = RollupField.formatIndexerAggName(dateHistogramField, DateHistogramAggregationBuilder.NAME); final DateHistogramValuesSourceBuilder dateHistogramBuilder = new DateHistogramValuesSourceBuilder(dateHistogramName); - dateHistogramBuilder.dateHistogramInterval(dateHistogram.getInterval()); + if (dateHistogram instanceof DateHistogramGroupConfig.FixedInterval) { + dateHistogramBuilder.fixedInterval(dateHistogram.getInterval()); + } else if (dateHistogram instanceof DateHistogramGroupConfig.CalendarInterval) { + dateHistogramBuilder.calendarInterval(dateHistogram.getInterval()); + } else { + dateHistogramBuilder.dateHistogramInterval(dateHistogram.getInterval()); + } dateHistogramBuilder.field(dateHistogramField); dateHistogramBuilder.timeZone(ZoneId.of(dateHistogram.getTimeZone())); return Collections.singletonList(dateHistogramBuilder); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java index 614cbba72b6ec..555b07a0b3987 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java @@ -44,65 +44,78 @@ public class RollupJobIdentifierUtilTests extends ESTestCase { private static final List UNITS = new ArrayList<>(DateHistogramAggregationBuilder.DATE_FIELD_UNITS.keySet()); public void testOneMatch() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(job.getGroupConfig().getDateHistogram().getInterval()); + .calendarInterval(job.getGroupConfig().getDateHistogram().getInterval()); Set bestCaps = RollupJobIdentifierUtils.findBestJobs(builder, caps); assertThat(bestCaps.size(), equalTo(1)); } public void testBiggerButCompatibleInterval() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new 
DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(new DateHistogramInterval("1d")); + .calendarInterval(new DateHistogramInterval("1d")); Set bestCaps = RollupJobIdentifierUtils.findBestJobs(builder, caps); assertThat(bestCaps.size(), equalTo(1)); } public void testBiggerButCompatibleFixedInterval() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("100s"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.FixedInterval("foo", new DateHistogramInterval("100s"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(new DateHistogramInterval("1000s")); + .fixedInterval(new DateHistogramInterval("1000s")); + + Set bestCaps = RollupJobIdentifierUtils.findBestJobs(builder, caps); + assertThat(bestCaps.size(), equalTo(1)); + } + + public void testBiggerButCompatibleFixedIntervalInCalFormat() { + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.FixedInterval("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(job); + Set caps = singletonSet(cap); + + DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") + .fixedInterval(new DateHistogramInterval("7d")); Set bestCaps = RollupJobIdentifierUtils.findBestJobs(builder, caps); assertThat(bestCaps.size(), equalTo(1)); } public void testBiggerButCompatibleFixedMillisInterval() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("100ms"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.FixedInterval("foo", new DateHistogramInterval("100ms"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .interval(1000); + .fixedInterval(new DateHistogramInterval("1000ms")); Set bestCaps = RollupJobIdentifierUtils.findBestJobs(builder, caps); assertThat(bestCaps.size(), equalTo(1)); } public void testIncompatibleInterval() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(new DateHistogramInterval("1h")); + .calendarInterval(new DateHistogramInterval("1h")); RuntimeException e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(builder, caps)); assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [date_histogram] agg on field " + @@ -110,13 +123,13 @@ public void testIncompatibleInterval() { } public 
void testIncompatibleFixedCalendarInterval() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("5d"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.FixedInterval("foo", new DateHistogramInterval("5d"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(new DateHistogramInterval("day")); + .calendarInterval(new DateHistogramInterval("day")); RuntimeException e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(builder, caps)); assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [date_histogram] agg on field " + @@ -124,13 +137,14 @@ public void testIncompatibleFixedCalendarInterval() { } public void testBadTimeZone() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, "CET")); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"), + null, "CET")); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(new DateHistogramInterval("1h")) + .calendarInterval(new DateHistogramInterval("1h")) .timeZone(ZoneOffset.UTC); RuntimeException e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(builder, caps)); @@ -139,7 +153,7 @@ public void testBadTimeZone() { } public void testMetricOnlyAgg() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final List metrics = singletonList(new MetricConfig("bar", singletonList("max"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); RollupJobCaps cap = new RollupJobCaps(job); @@ -152,13 +166,13 @@ public void testMetricOnlyAgg() { } public void testOneOfTwoMatchingCaps() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(new DateHistogramInterval("1h")) + .calendarInterval(new DateHistogramInterval("1h")) .subAggregation(new MaxAggregationBuilder("the_max").field("bar")); RuntimeException e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(builder, caps)); @@ -167,20 +181,20 @@ public void testOneOfTwoMatchingCaps() { } public void testTwoJobsSameRollupIndex() { - final GroupConfig group = new GroupConfig(new 
DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = new HashSet<>(2); caps.add(cap); - final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final RollupJobConfig job2 = new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, group2, emptyList(), null); RollupJobCaps cap2 = new RollupJobCaps(job2); caps.add(cap2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(new DateHistogramInterval("1h")); + .calendarInterval(new DateHistogramInterval("1h")); Set bestCaps = RollupJobIdentifierUtils.findBestJobs(builder, caps); @@ -189,7 +203,7 @@ public void testTwoJobsSameRollupIndex() { } public void testTwoJobsButBothPartialMatches() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final List metrics = singletonList(new MetricConfig("bar", singletonList("max"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); RollupJobCaps cap = new RollupJobCaps(job); @@ -202,7 +216,7 @@ public void testTwoJobsButBothPartialMatches() { caps.add(cap2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(new DateHistogramInterval("1h")) + .calendarInterval(new DateHistogramInterval("1h")) .subAggregation(new MaxAggregationBuilder("the_max").field("bar")) // <-- comes from job1 .subAggregation(new MinAggregationBuilder("the_min").field("bar")); // <-- comes from job2 @@ -212,17 +226,17 @@ public void testTwoJobsButBothPartialMatches() { } public void testComparableDifferentDateIntervals() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); - final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"))); + final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"))); final RollupJobConfig job2 = new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, group2, emptyList(), null); RollupJobCaps cap2 = new RollupJobCaps(job2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(new DateHistogramInterval("1d")); + .calendarInterval(new DateHistogramInterval("1d")); Set caps = new HashSet<>(2); caps.add(cap); @@ -234,17 +248,17 @@ public void testComparableDifferentDateIntervals() { } public void 
testComparableDifferentDateIntervalsOnlyOneWorks() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); - final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"))); + final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"))); final RollupJobConfig job2 = new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, group2, emptyList(), null); RollupJobCaps cap2 = new RollupJobCaps(job2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(new DateHistogramInterval("1h")); + .calendarInterval(new DateHistogramInterval("1h")); Set caps = new HashSet<>(2); caps.add(cap); @@ -256,18 +270,19 @@ public void testComparableDifferentDateIntervalsOnlyOneWorks() { } public void testComparableNoHistoVsHisto() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); final HistogramGroupConfig histoConfig = new HistogramGroupConfig(100L, "bar"); - final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h")), histoConfig, null); + final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")), + histoConfig, null); final RollupJobConfig job2 = new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, group2, emptyList(), null); RollupJobCaps cap2 = new RollupJobCaps(job2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(new DateHistogramInterval("1h")) + .calendarInterval(new DateHistogramInterval("1h")) .subAggregation(new HistogramAggregationBuilder("histo").field("bar").interval(100)); Set caps = new HashSet<>(2); @@ -280,18 +295,19 @@ public void testComparableNoHistoVsHisto() { } public void testComparableNoTermsVsTerms() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); final TermsGroupConfig termsConfig = new TermsGroupConfig("bar"); - final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h")), null, termsConfig); + final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")), + null, termsConfig); final RollupJobConfig job2 = new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * 
* * ?", 10, group2, emptyList(), null); RollupJobCaps cap2 = new RollupJobCaps(job2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(new DateHistogramInterval("1h")) + .calendarInterval(new DateHistogramInterval("1h")) .subAggregation(new TermsAggregationBuilder("histo", ValueType.STRING).field("bar")); Set caps = new HashSet<>(2); @@ -312,7 +328,7 @@ public void testHistoSameNameWrongTypeInCaps() { final GroupConfig group = new GroupConfig( // NOTE same name but wrong type - new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), new HistogramGroupConfig(1L, "baz"), // <-- NOTE right type but wrong name null ); @@ -329,13 +345,13 @@ public void testHistoSameNameWrongTypeInCaps() { public void testMissingDateHisto() { DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); - histo.dateHistogramInterval(new DateHistogramInterval("1d")) + histo.calendarInterval(new DateHistogramInterval("1d")) .field("other_field") .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); final GroupConfig group = new GroupConfig( - new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()) + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()) ); final List metrics = Arrays.asList(new MetricConfig("max_field", singletonList("max")), new MetricConfig("avg_field", singletonList("avg"))); @@ -350,14 +366,14 @@ public void testMissingDateHisto() { public void testNoMatchingInterval() { DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); - histo.interval(1) + histo.fixedInterval(new DateHistogramInterval("1ms")) .field("foo") .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); final GroupConfig group = new GroupConfig( // interval in job is much higher than agg interval above - new DateHistogramGroupConfig("foo", new DateHistogramInterval("100d"), null, DateTimeZone.UTC.getID()) + new DateHistogramGroupConfig.FixedInterval("foo", new DateHistogramInterval("100d"), null, DateTimeZone.UTC.getID()) ); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); Set caps = singletonSet(new RollupJobCaps(job)); @@ -369,14 +385,14 @@ public void testNoMatchingInterval() { public void testDateHistoMissingFieldInCaps() { DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); - histo.dateHistogramInterval(new DateHistogramInterval("1d")) + histo.calendarInterval(new DateHistogramInterval("1d")) .field("foo") .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); final GroupConfig group = new GroupConfig( // NOTE different field from the one in the query - new DateHistogramGroupConfig("bar", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()) + new DateHistogramGroupConfig.CalendarInterval("bar", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()) ); final List metrics = Arrays.asList(new MetricConfig("max_field", 
singletonList("max")), new MetricConfig("avg_field", singletonList("avg"))); @@ -397,7 +413,7 @@ public void testHistoMissingFieldInCaps() { .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); final GroupConfig group = new GroupConfig( - new DateHistogramGroupConfig("bar", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), + new DateHistogramGroupConfig.CalendarInterval("bar", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), new HistogramGroupConfig(1L, "baz"), // <-- NOTE right type but wrong name null ); @@ -420,7 +436,7 @@ public void testNoMatchingHistoInterval() { .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); final GroupConfig group = new GroupConfig( - new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), new HistogramGroupConfig(1L, "baz"), // <-- NOTE right type but wrong name null ); @@ -440,7 +456,7 @@ public void testHistoIntervalNotMultiple() { .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"), null, "UTC"), new HistogramGroupConfig(3L, "bar"), null); @@ -457,7 +473,7 @@ public void testHistoIntervalNotMultiple() { public void testMissingMetric() { int i = ESTestCase.randomIntBetween(0, 3); - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final List metrics = singletonList(new MetricConfig("foo", Arrays.asList("avg", "max", "min", "sum"))); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); Set caps = singletonSet(new RollupJobCaps(job)); @@ -487,37 +503,7 @@ public void testMissingMetric() { } public void testValidateFixedInterval() { - boolean valid = RollupJobIdentifierUtils.validateFixedInterval(100, new DateHistogramInterval("100ms")); - assertTrue(valid); - - valid = RollupJobIdentifierUtils.validateFixedInterval(200, new DateHistogramInterval("100ms")); - assertTrue(valid); - - valid = RollupJobIdentifierUtils.validateFixedInterval(1000, new DateHistogramInterval("200ms")); - assertTrue(valid); - - valid = RollupJobIdentifierUtils.validateFixedInterval(5*60*1000, new DateHistogramInterval("5m")); - assertTrue(valid); - - valid = RollupJobIdentifierUtils.validateFixedInterval(10*5*60*1000, new DateHistogramInterval("5m")); - assertTrue(valid); - - valid = RollupJobIdentifierUtils.validateFixedInterval(100, new DateHistogramInterval("500ms")); - assertFalse(valid); - - valid = RollupJobIdentifierUtils.validateFixedInterval(100, new DateHistogramInterval("5m")); - assertFalse(valid); - - valid = RollupJobIdentifierUtils.validateFixedInterval(100, new DateHistogramInterval("minute")); - assertFalse(valid); - - valid = RollupJobIdentifierUtils.validateFixedInterval(100, new DateHistogramInterval("second")); - assertFalse(valid); - - // ----------- - // Same tests, with both being DateHistoIntervals - // ----------- - valid = 
RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("100ms"), + boolean valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("100ms"), new DateHistogramInterval("100ms")); assertTrue(valid); @@ -545,13 +531,11 @@ public void testValidateFixedInterval() { new DateHistogramInterval("5m")); assertFalse(valid); - valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("100ms"), - new DateHistogramInterval("minute")); - assertFalse(valid); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("100ms"), new DateHistogramInterval("minute"))); + assertThat(e.getMessage(), equalTo("failed to parse setting [date_histo.config.interval] with value " + + "[minute] as a time value: unit is missing or unrecognized")); - valid = RollupJobIdentifierUtils.validateFixedInterval(new DateHistogramInterval("100ms"), - new DateHistogramInterval("second")); - assertFalse(valid); } public void testValidateCalendarInterval() { @@ -590,8 +574,16 @@ public void testComparatorMixed() { List caps = new ArrayList<>(numCaps); for (int i = 0; i < numCaps; i++) { - DateHistogramInterval interval = getRandomInterval(); - GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", interval)); + DateHistogramInterval interval; + DateHistogramGroupConfig dateHistoConfig; + if (randomBoolean()) { + interval = getRandomCalendarInterval(); + dateHistoConfig = new DateHistogramGroupConfig.CalendarInterval("foo", interval); + } else { + interval = getRandomFixedInterval(); + dateHistoConfig = new DateHistogramGroupConfig.FixedInterval("foo", interval); + } + GroupConfig group = new GroupConfig(dateHistoConfig); RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); caps.add(cap); @@ -617,7 +609,7 @@ public void testComparatorFixed() { for (int i = 0; i < numCaps; i++) { DateHistogramInterval interval = getRandomFixedInterval(); - GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", interval)); + GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.FixedInterval("foo", interval)); RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); caps.add(cap); @@ -643,7 +635,7 @@ public void testComparatorCalendar() { for (int i = 0; i < numCaps; i++) { DateHistogramInterval interval = getRandomCalendarInterval(); - GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", interval)); + GroupConfig group = new GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", interval)); RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); caps.add(cap); @@ -665,21 +657,22 @@ public void testComparatorCalendar() { public void testObsoleteTimezone() { // Job has "obsolete" timezone - DateHistogramGroupConfig dateHisto = new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, "Canada/Mountain"); + DateHistogramGroupConfig dateHisto = new DateHistogramGroupConfig.CalendarInterval("foo", + new DateHistogramInterval("1h"), null, "Canada/Mountain"); GroupConfig group = new GroupConfig(dateHisto); RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, 
emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(job.getGroupConfig().getDateHistogram().getInterval()) + .calendarInterval(job.getGroupConfig().getDateHistogram().getInterval()) .timeZone(ZoneId.of("Canada/Mountain")); Set bestCaps = RollupJobIdentifierUtils.findBestJobs(builder, caps); assertThat(bestCaps.size(), equalTo(1)); builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(job.getGroupConfig().getDateHistogram().getInterval()) + .calendarInterval(job.getGroupConfig().getDateHistogram().getInterval()) .timeZone(ZoneId.of("America/Edmonton")); bestCaps = RollupJobIdentifierUtils.findBestJobs(builder, caps); @@ -687,21 +680,21 @@ public void testObsoleteTimezone() { // now the reverse, job has "new" timezone - dateHisto = new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, "America/Edmonton"); + dateHisto = new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"), null, "America/Edmonton"); group = new GroupConfig(dateHisto); job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); cap = new RollupJobCaps(job); caps = singletonSet(cap); builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(job.getGroupConfig().getDateHistogram().getInterval()) + .calendarInterval(job.getGroupConfig().getDateHistogram().getInterval()) .timeZone(ZoneId.of("Canada/Mountain")); bestCaps = RollupJobIdentifierUtils.findBestJobs(builder, caps); assertThat(bestCaps.size(), equalTo(1)); builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(job.getGroupConfig().getDateHistogram().getInterval()) + .calendarInterval(job.getGroupConfig().getDateHistogram().getInterval()) .timeZone(ZoneId.of("America/Edmonton")); bestCaps = RollupJobIdentifierUtils.findBestJobs(builder, caps); @@ -712,20 +705,13 @@ private static long getMillis(RollupJobCaps cap) { for (RollupJobCaps.RollupFieldCaps fieldCaps : cap.getFieldCaps().values()) { for (Map agg : fieldCaps.getAggs()) { if (agg.get(RollupField.AGG).equals(DateHistogramAggregationBuilder.NAME)) { - return RollupJobIdentifierUtils.getMillisFixedOrCalendar((String) agg.get(RollupField.INTERVAL)); + return new DateHistogramInterval(RollupJobIdentifierUtils.retrieveInterval(agg)).estimateMillis(); } } } return Long.MAX_VALUE; } - private static DateHistogramInterval getRandomInterval() { - if (randomBoolean()) { - return getRandomFixedInterval(); - } - return getRandomCalendarInterval(); - } - private static DateHistogramInterval getRandomFixedInterval() { int value = randomIntBetween(1, 1000); String unit; diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java index db58115489d2a..27dcc751860bc 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java @@ -57,18 +57,18 @@ public void setUp() throws Exception { public void testBasicDateHisto() { DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); - histo.dateHistogramInterval(new 
DateHistogramInterval("1d")) + histo.calendarInterval(new DateHistogramInterval("1d")) .field("foo") .extendedBounds(new ExtendedBounds(0L, 1000L)) .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); - + List translated = translateAggregation(histo, namedWriteableRegistry); assertThat(translated.size(), equalTo(1)); assertThat(translated.get(0), Matchers.instanceOf(DateHistogramAggregationBuilder.class)); DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0); - assertThat(translatedHisto.dateHistogramInterval(), equalTo(new DateHistogramInterval("1d"))); + assertThat(translatedHisto.getCalendarInterval(), equalTo(new DateHistogramInterval("1d"))); assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp")); assertThat(translatedHisto.getSubAggregations().size(), equalTo(4)); @@ -93,7 +93,7 @@ public void testBasicDateHisto() { public void testFormattedDateHisto() { DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); - histo.dateHistogramInterval(new DateHistogramInterval("1d")) + histo.calendarInterval(new DateHistogramInterval("1d")) .field("foo") .extendedBounds(new ExtendedBounds(0L, 1000L)) .format("yyyy-MM-dd") @@ -104,7 +104,7 @@ public void testFormattedDateHisto() { assertThat(translated.get(0), Matchers.instanceOf(DateHistogramAggregationBuilder.class)); DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0); - assertThat(translatedHisto.dateHistogramInterval(), equalTo(new DateHistogramInterval("1d"))); + assertThat(translatedHisto.getCalendarInterval(), equalTo(new DateHistogramInterval("1d"))); assertThat(translatedHisto.format(), equalTo("yyyy-MM-dd")); assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp")); } @@ -150,7 +150,7 @@ public void testUnsupportedMetric() { public void testDateHistoIntervalWithMinMax() { DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); - histo.dateHistogramInterval(new DateHistogramInterval("1d")) + histo.calendarInterval(new DateHistogramInterval("1d")) .field("foo") .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); @@ -160,7 +160,7 @@ public void testDateHistoIntervalWithMinMax() { assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class)); DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0); - assertThat(translatedHisto.dateHistogramInterval().toString(), equalTo("1d")); + assertThat(translatedHisto.getCalendarInterval().toString(), equalTo("1d")); assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp")); assertThat(translatedHisto.getSubAggregations().size(), equalTo(4)); @@ -195,7 +195,8 @@ public void testDateHistoLongIntervalWithMinMax() { assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class)); DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0); - assertThat(translatedHisto.interval(), equalTo(86400000L)); + assertNull(translatedHisto.getCalendarInterval()); + assertThat(translatedHisto.getFixedInterval(), equalTo(new DateHistogramInterval("86400000ms"))); assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp")); 
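The assertions above pin down the upgrade path for the legacy millisecond interval: a numeric interval of 86400000 ms now surfaces through getFixedInterval() as "86400000ms" and never as a calendar interval. A minimal standalone sketch of that builder contract follows; the builder name and field are illustrative, and it relies only on the public getters this patch introduces:

    import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
    import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

    public class FixedIntervalUpgradeSketch {
        public static void main(String[] args) {
            // 86400000 ms is exactly 24 wall-clock hours, a fixed day rather than a calendar day.
            DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("sketch")
                .fixedInterval(new DateHistogramInterval("86400000ms"));
            histo.field("foo");

            // The fixed and calendar families are mutually exclusive on the builder.
            assert histo.getFixedInterval().equals(new DateHistogramInterval("86400000ms"));
            assert histo.getCalendarInterval() == null;
        }
    }
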
assertThat(translatedHisto.getSubAggregations().size(), equalTo(4)); @@ -216,12 +217,15 @@ public void testDateHistoLongIntervalWithMinMax() { assertThat(subAggs.get("test_histo._count"), instanceOf(SumAggregationBuilder.class)); assertThat(((SumAggregationBuilder)subAggs.get("test_histo._count")).field(), equalTo("foo.date_histogram._count")); + + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] " + + "or [calendar_interval] in the future."); } public void testDateHistoWithTimezone() { ZoneId timeZone = ZoneId.of(randomFrom(ZoneId.getAvailableZoneIds())); DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); - histo.interval(86400000) + histo.fixedInterval(new DateHistogramInterval("86400000ms")) .field("foo") .timeZone(timeZone); @@ -230,11 +234,55 @@ public void testDateHistoWithTimezone() { assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class)); DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0); - assertThat(translatedHisto.interval(), equalTo(86400000L)); + assertThat(translatedHisto.getFixedInterval().toString(), equalTo("86400000ms")); assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp")); assertThat(translatedHisto.timeZone(), equalTo(timeZone)); } + public void testDeprecatedInterval() { + DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); + histo.interval(86400000).field("foo"); + + List translated = translateAggregation(histo, namedWriteableRegistry); + assertThat(translated.size(), equalTo(1)); + assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class)); + DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0); + + assertThat(translatedHisto.getFixedInterval().toString(), equalTo("86400000ms")); + assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp")); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] " + + "or [calendar_interval] in the future."); + } + + public void testDeprecatedDateHistoInterval() { + DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo"); + histo.dateHistogramInterval(new DateHistogramInterval("1d")).field("foo"); + + List translated = translateAggregation(histo, namedWriteableRegistry); + assertThat(translated.size(), equalTo(1)); + assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class)); + DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0); + + assertThat(translatedHisto.dateHistogramInterval().toString(), equalTo("1d")); + assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp")); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] " + + "or [calendar_interval] in the future."); + + + histo = new DateHistogramAggregationBuilder("test_histo"); + histo.dateHistogramInterval(new DateHistogramInterval("4d")).field("foo"); + + translated = translateAggregation(histo, namedWriteableRegistry); + assertThat(translated.size(), equalTo(1)); + assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class)); + translatedHisto = (DateHistogramAggregationBuilder)translated.get(0); + + assertThat(translatedHisto.dateHistogramInterval().toString(), equalTo("4d")); + assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp")); + 
assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] " + + "or [calendar_interval] in the future."); + } + public void testAvgMetric() { List translated = translateAggregation(new AvgAggregationBuilder("test_metric") .field("foo"), namedWriteableRegistry); @@ -320,7 +368,6 @@ public void testBasicHisto() { assertThat(subAggs.get("test_histo._count"), Matchers.instanceOf(SumAggregationBuilder.class)); assertThat(((SumAggregationBuilder)subAggs.get("test_histo._count")).field(), equalTo("foo.histogram._count")); - } public void testUnsupportedAgg() { diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java index 849461f1b6202..84f0862183c44 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java @@ -50,6 +50,7 @@ import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder; @@ -474,7 +475,7 @@ public void testMismatch() throws IOException { = new GeoBoundsAggregationBuilder("histo").field("bar"); DateHistogramAggregationBuilder histoBuilder = new DateHistogramAggregationBuilder("histo") - .field("bar").interval(100); + .field("bar").fixedInterval(new DateHistogramInterval("100ms")); FilterAggregationBuilder filterBuilder = new FilterAggregationBuilder("filter", new TermQueryBuilder("foo", "bar")); filterBuilder.subAggregation(histoBuilder); @@ -518,11 +519,11 @@ public void testMismatch() throws IOException { public void testDateHisto() throws IOException { DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp").interval(100); + .field("timestamp").fixedInterval(new DateHistogramInterval("100ms")); DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo") .field("timestamp.date_histogram." + RollupField.TIMESTAMP) - .interval(100) + .fixedInterval(new DateHistogramInterval("100ms")) .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) .field("timestamp.date_histogram." + RollupField.COUNT_FIELD)); @@ -562,12 +563,12 @@ public void testDateHisto() throws IOException { public void testDateHistoWithGap() throws IOException { DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp").interval(100) + .field("timestamp").fixedInterval(new DateHistogramInterval("100ms")) .minDocCount(0); DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo") .field("timestamp.date_histogram." + RollupField.TIMESTAMP) - .interval(100) + .fixedInterval(new DateHistogramInterval("100ms")) .minDocCount(0) .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) .field("timestamp.date_histogram." 
+ RollupField.COUNT_FIELD)); @@ -619,12 +620,12 @@ public void testDateHistoWithGap() throws IOException { public void testNonMatchingPartition() throws IOException { DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp").interval(100) + .field("timestamp").fixedInterval(new DateHistogramInterval("100ms")) .minDocCount(0); DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo") .field("timestamp.date_histogram." + RollupField.TIMESTAMP) - .interval(100) + .fixedInterval(new DateHistogramInterval("100ms")) .minDocCount(0) .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) .field("timestamp.date_histogram." + RollupField.COUNT_FIELD)); @@ -732,11 +733,11 @@ public void testNonMatchingPartition() throws IOException { public void testDateHistoOverlappingAggTrees() throws IOException { DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp").interval(100); + .field("timestamp").fixedInterval(new DateHistogramInterval("100ms")); DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo") .field("timestamp.date_histogram." + RollupField.TIMESTAMP) - .interval(100) + .fixedInterval(new DateHistogramInterval("100ms")) .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) .field("timestamp.date_histogram." + RollupField.COUNT_FIELD)); @@ -793,11 +794,11 @@ public void testDateHistoOverlappingAggTrees() throws IOException { public void testDateHistoOverlappingMergeRealIntoZero() throws IOException { DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp").interval(100); + .field("timestamp").fixedInterval(new DateHistogramInterval("100ms")); DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo") .field("timestamp.date_histogram." + RollupField.TIMESTAMP) - .interval(100) + .fixedInterval(new DateHistogramInterval("100ms")) .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) .field("timestamp.date_histogram." + RollupField.COUNT_FIELD)); @@ -860,11 +861,11 @@ public void testDateHistoOverlappingMergeRealIntoZero() throws IOException { public void testDateHistoOverlappingMergeZeroIntoReal() throws IOException { DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp").interval(100).minDocCount(0); + .field("timestamp").fixedInterval(new DateHistogramInterval("100ms")).minDocCount(0); DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo") .field("timestamp.date_histogram." + RollupField.TIMESTAMP) - .interval(100) + .fixedInterval(new DateHistogramInterval("100ms")) .minDocCount(0) .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) .field("timestamp.date_histogram." + RollupField.COUNT_FIELD)); @@ -1219,11 +1220,11 @@ public void testHisto() throws IOException { public void testOverlappingBuckets() throws IOException { DateHistogramAggregationBuilder nonRollupHisto = new DateHistogramAggregationBuilder("histo") - .field("timestamp").interval(100); + .field("timestamp").fixedInterval(new DateHistogramInterval("100ms")); DateHistogramAggregationBuilder rollupHisto = new DateHistogramAggregationBuilder("histo") .field("timestamp.date_histogram." 
+ RollupField.TIMESTAMP) - .interval(100) + .fixedInterval(new DateHistogramInterval("100ms")) .subAggregation(new SumAggregationBuilder("histo." + RollupField.COUNT_FIELD) .field("timestamp.date_histogram." + RollupField.COUNT_FIELD)); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java index a795edca83ed3..b1ae36c538fec 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java @@ -100,7 +100,7 @@ public void testNonZeroSize() { SearchSourceBuilder source = new SearchSourceBuilder(); source.query(new MatchAllQueryBuilder()); source.size(100); - source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo").interval(123)); + source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo").fixedInterval(new DateHistogramInterval("123ms"))); SearchRequest request = new SearchRequest(normalIndices, source); NamedWriteableRegistry registry = mock(NamedWriteableRegistry.class); Exception e = expectThrows(IllegalArgumentException.class, @@ -111,7 +111,7 @@ public void testNonZeroSize() { public void testBadQuery() { SearchSourceBuilder source = new SearchSourceBuilder(); source.query(new MatchPhraseQueryBuilder("foo", "bar")); - source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo").interval(123)); + source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo").fixedInterval(new DateHistogramInterval("123ms"))); source.size(0); Exception e = expectThrows(IllegalArgumentException.class, () -> TransportRollupSearchAction.rewriteQuery(new MatchPhraseQueryBuilder("foo", "bar"), Collections.emptySet())); @@ -119,7 +119,8 @@ public void testBadQuery() { } public void testRangeTimezoneUTC() { - final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig groupConfig = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); @@ -131,7 +132,8 @@ public void testRangeTimezoneUTC() { } public void testRangeNullTimeZone() { - final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, null)); + final GroupConfig groupConfig = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"), null, null)); final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); @@ -143,7 +145,8 @@ public void testRangeNullTimeZone() { } public void testRangeDifferentTZ() { - final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, "UTC")); + final GroupConfig groupConfig = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"), null, "UTC")); final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); 
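Every group config in these tests now names its interval family explicitly via the DateHistogramGroupConfig.CalendarInterval and DateHistogramGroupConfig.FixedInterval subclasses introduced by this patch. A minimal sketch of constructing each, with illustrative field and interval values:

    import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
    import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
    import org.elasticsearch.xpack.core.rollup.job.GroupConfig;

    public class GroupConfigSketch {
        public static void main(String[] args) {
            // Calendar-aware grouping: "1h" follows the clock, including DST transitions.
            DateHistogramGroupConfig calendar =
                new DateHistogramGroupConfig.CalendarInterval("timestamp", new DateHistogramInterval("1h"));

            // Fixed grouping: "60m" is always exactly 3,600,000 milliseconds.
            DateHistogramGroupConfig fixed =
                new DateHistogramGroupConfig.FixedInterval("timestamp", new DateHistogramInterval("60m"));

            // Either subclass drops into GroupConfig where the legacy class used to go.
            GroupConfig hourly = new GroupConfig(calendar);
            GroupConfig strictHourly = new GroupConfig(fixed);
            System.out.println(hourly.getDateHistogram().getInterval() + " / "
                + strictHourly.getDateHistogram().getInterval());
        }
    }
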
RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); @@ -155,7 +158,8 @@ public void testRangeDifferentTZ() { public void testTermQuery() { final TermsGroupConfig terms = new TermsGroupConfig("foo"); - final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("boo", new DateHistogramInterval("1h")), null, terms); + final GroupConfig groupConfig = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("boo", new DateHistogramInterval("1h")), null, terms); final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); @@ -167,7 +171,8 @@ public void testTermQuery() { public void testTermsQuery() { final TermsGroupConfig terms = new TermsGroupConfig("foo"); - final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("boo", new DateHistogramInterval("1h")), null, terms); + final GroupConfig groupConfig = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("boo", new DateHistogramInterval("1h")), null, terms); final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); @@ -181,7 +186,8 @@ public void testTermsQuery() { } public void testCompounds() { - final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig groupConfig = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); @@ -195,7 +201,8 @@ public void testCompounds() { } public void testMatchAll() { - final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig groupConfig = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); @@ -206,7 +213,8 @@ public void testMatchAll() { public void testAmbiguousResolution() { final TermsGroupConfig terms = new TermsGroupConfig("foo"); - final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h")), null, terms); + final GroupConfig groupConfig = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")), null, terms); final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); @@ -254,7 +262,7 @@ public void testPostFilter() { TransportRollupSearchAction.RollupSearchContext ctx = new TransportRollupSearchAction.RollupSearchContext(normalIndices, rollupIndices, Collections.emptySet()); SearchSourceBuilder source = new SearchSourceBuilder(); - source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo").interval(123)); + source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo").fixedInterval(new 
DateHistogramInterval("123ms"))); source.postFilter(new TermQueryBuilder("foo", "bar")); source.size(0); SearchRequest request = new SearchRequest(normalIndices, source); @@ -355,7 +363,8 @@ public void testLiveOnlyCreateMSearch() { } public void testGood() { - final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final GroupConfig groupConfig = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h"))); final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = singleton(cap); @@ -371,7 +380,7 @@ public void testGood() { source.query(getQueryBuilder(1)); source.size(0); source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(config.getGroupConfig().getDateHistogram().getInterval())); + .calendarInterval(config.getGroupConfig().getDateHistogram().getInterval())); SearchRequest request = new SearchRequest(combinedIndices, source); MultiSearchRequest msearch = TransportRollupSearchAction.createMSearchRequest(request, namedWriteableRegistry, ctx); @@ -396,11 +405,11 @@ public void testGoodButNullQuery() { SearchSourceBuilder source = new SearchSourceBuilder(); source.query(null); source.size(0); - source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo").dateHistogramInterval(new DateHistogramInterval("1d"))); + source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo").calendarInterval(new DateHistogramInterval("1d"))); SearchRequest request = new SearchRequest(combinedIndices, source); - final GroupConfig groupConfig = - new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID())); + final GroupConfig groupConfig = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID())); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); Set caps = singleton(new RollupJobCaps(job)); @@ -422,7 +431,8 @@ public void testGoodButNullQuery() { } public void testTwoMatchingJobs() { - final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h")), null, null); + final GroupConfig groupConfig = new GroupConfig( + new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")), null, null); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); @@ -447,7 +457,7 @@ public void testTwoMatchingJobs() { source.query(getQueryBuilder(1)); source.size(0); source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(job.getGroupConfig().getDateHistogram().getInterval())); + .calendarInterval(job.getGroupConfig().getDateHistogram().getInterval())); SearchRequest request = new SearchRequest(combinedIndices, source); MultiSearchRequest msearch = TransportRollupSearchAction.createMSearchRequest(request, namedWriteableRegistry, ctx); @@ -468,7 +478,7 @@ public void testTwoMatchingJobs() { public void testTwoMatchingJobsOneBetter() { final GroupConfig groupConfig = - new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h")), null, null); + new 
GroupConfig(new DateHistogramGroupConfig.CalendarInterval("foo", new DateHistogramInterval("1h")), null, null); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); @@ -494,7 +504,7 @@ public void testTwoMatchingJobsOneBetter() { source.query(getQueryBuilder(1)); source.size(0); source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(job.getGroupConfig().getDateHistogram().getInterval())); + .calendarInterval(job.getGroupConfig().getDateHistogram().getInterval())); SearchRequest request = new SearchRequest(combinedIndices, source); MultiSearchRequest msearch = TransportRollupSearchAction.createMSearchRequest(request, namedWriteableRegistry, ctx); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java index 2f0612a65d2dc..32f05bed4e772 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; +import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig.CalendarInterval; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; import org.elasticsearch.xpack.core.rollup.job.HistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.MetricConfig; @@ -57,36 +58,36 @@ public void testNoDateHisto() { public void testEmptyDateHistoField() { Exception e = expectThrows(IllegalArgumentException.class, - () -> new DateHistogramGroupConfig(null, DateHistogramInterval.HOUR)); + () -> new CalendarInterval(null, DateHistogramInterval.HOUR)); assertThat(e.getMessage(), equalTo("Field must be a non-null, non-empty string")); - e = expectThrows(IllegalArgumentException.class, () -> new DateHistogramGroupConfig("", DateHistogramInterval.HOUR)); + e = expectThrows(IllegalArgumentException.class, () -> new CalendarInterval("", DateHistogramInterval.HOUR)); assertThat(e.getMessage(), equalTo("Field must be a non-null, non-empty string")); } public void testEmptyDateHistoInterval() { - Exception e = expectThrows(IllegalArgumentException.class, () -> new DateHistogramGroupConfig("foo", null)); + Exception e = expectThrows(IllegalArgumentException.class, () -> new CalendarInterval("foo", null)); assertThat(e.getMessage(), equalTo("Interval must be non-null")); } public void testNullTimeZone() { - DateHistogramGroupConfig config = new DateHistogramGroupConfig("foo", DateHistogramInterval.HOUR, null, null); + DateHistogramGroupConfig config = new CalendarInterval("foo", DateHistogramInterval.HOUR, null, null); assertThat(config.getTimeZone(), equalTo(DateTimeZone.UTC.getID())); } public void testEmptyTimeZone() { - DateHistogramGroupConfig config = new DateHistogramGroupConfig("foo", DateHistogramInterval.HOUR, null, ""); + DateHistogramGroupConfig config = new CalendarInterval("foo", DateHistogramInterval.HOUR, null, ""); assertThat(config.getTimeZone(), equalTo(DateTimeZone.UTC.getID())); } public void testDefaultTimeZone() { - DateHistogramGroupConfig config = new DateHistogramGroupConfig("foo", DateHistogramInterval.HOUR); + 
DateHistogramGroupConfig config = new CalendarInterval("foo", DateHistogramInterval.HOUR); assertThat(config.getTimeZone(), equalTo(DateTimeZone.UTC.getID())); } - public void testUnkownTimeZone() { + public void testUnknownTimeZone() { Exception e = expectThrows(ZoneRulesException.class, - () -> new DateHistogramGroupConfig("foo", DateHistogramInterval.HOUR, null, "FOO")); + () -> new CalendarInterval("foo", DateHistogramInterval.HOUR, null, "FOO")); assertThat(e.getMessage(), equalTo("Unknown time-zone ID: FOO")); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java index 38b90328a8743..080482735e342 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java @@ -101,8 +101,8 @@ public void testMissingFields() throws IOException { valueFieldType.setName(valueField); // Setup the composite agg - //TODO swap this over to DateHistoConfig.Builder once DateInterval is in - DateHistogramGroupConfig dateHistoGroupConfig = new DateHistogramGroupConfig(timestampField, DateHistogramInterval.DAY); + DateHistogramGroupConfig dateHistoGroupConfig + = new DateHistogramGroupConfig.CalendarInterval(timestampField, DateHistogramInterval.DAY); CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, RollupIndexer.createValueSourceBuilders(dateHistoGroupConfig)); @@ -169,7 +169,7 @@ public void testCorrectFields() throws IOException { DateHistogramValuesSourceBuilder dateHisto = new DateHistogramValuesSourceBuilder("the_histo." + DateHistogramAggregationBuilder.NAME) .field(timestampField) - .interval(1); + .fixedInterval(new DateHistogramInterval("1ms")); CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, singletonList(dateHisto)); @@ -292,7 +292,7 @@ public void testEmptyCounts() throws IOException { DateHistogramValuesSourceBuilder dateHisto = new DateHistogramValuesSourceBuilder("the_histo." + DateHistogramAggregationBuilder.NAME) .field(timestampField) - .dateHistogramInterval(new DateHistogramInterval("1d")); + .calendarInterval(new DateHistogramInterval("1d")); CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, singletonList(dateHisto)); @@ -607,7 +607,7 @@ public void testTimezone() throws IOException { DateHistogramValuesSourceBuilder dateHisto = new DateHistogramValuesSourceBuilder("the_histo."
+ DateHistogramAggregationBuilder.NAME) .field(timestampField) - .dateHistogramInterval(new DateHistogramInterval("1d")) + .calendarInterval(new DateHistogramInterval("1d")) .timeZone(ZoneId.of("-01:00", ZoneId.SHORT_IDS)); // adds a timezone so that we aren't on default UTC CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 18365c2b48501..b0b6dc8333731 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -49,6 +49,8 @@ import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; +import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig.CalendarInterval; +import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig.FixedInterval; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; import org.elasticsearch.xpack.core.rollup.job.MetricConfig; import org.elasticsearch.xpack.core.rollup.job.RollupJob; @@ -96,7 +98,7 @@ private void setup() { public void testSimpleDateHisto() throws Exception { String rollupIndex = randomAlphaOfLength(10); String field = "the_histo"; - DateHistogramGroupConfig dateHistoConfig = new DateHistogramGroupConfig(field, new DateHistogramInterval("1ms")); + DateHistogramGroupConfig dateHistoConfig = new FixedInterval(field, new DateHistogramInterval("1ms")); RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.emptyList()); final List<Map<String, Object>> dataset = new ArrayList<>(); dataset.addAll( @@ -140,7 +142,7 @@ public void testSimpleDateHisto() throws Exception { public void testDateHistoAndMetrics() throws Exception { String rollupIndex = randomAlphaOfLength(10); String field = "the_histo"; - DateHistogramGroupConfig dateHistoConfig = new DateHistogramGroupConfig(field, new DateHistogramInterval("1h")); + DateHistogramGroupConfig dateHistoConfig = new CalendarInterval(field, new DateHistogramInterval("1h")); MetricConfig config = new MetricConfig("counter", Arrays.asList("avg", "sum", "max", "min")); RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.singletonList(config)); final List<Map<String, Object>> dataset = new ArrayList<>(); @@ -263,7 +265,7 @@ public void testSimpleDateHistoWithDelay() throws Exception { String rollupIndex = randomAlphaOfLengthBetween(5, 10); String field = "the_histo"; DateHistogramGroupConfig dateHistoConfig = - new DateHistogramGroupConfig(field, new DateHistogramInterval("1m"), new DateHistogramInterval("1h"), null); + new FixedInterval(field, new DateHistogramInterval("1m"), new DateHistogramInterval("1h"), null); RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.emptyList()); final List<Map<String, Object>> dataset = new ArrayList<>(); long now = System.currentTimeMillis(); @@ -344,7 +346,7 @@ public void testSimpleDateHistoWithTimeZone() throws Exception { String timeZone = DateTimeZone.forOffsetHours(-3).getID(); String rollupIndex = randomAlphaOfLengthBetween(5, 10); String field = "the_histo"; - DateHistogramGroupConfig dateHistoConfig = new
DateHistogramGroupConfig(field, new DateHistogramInterval("1d"), null, timeZone); + DateHistogramGroupConfig dateHistoConfig = new CalendarInterval(field, new DateHistogramInterval("1d"), null, timeZone); RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.emptyList()); executeTestCase(dataset, job, now, (resp) -> { @@ -404,9 +406,9 @@ public void testRandomizedDateHisto() throws Exception { String timestampField = "ts"; String valueField = "the_avg"; - String timeInterval = randomIntBetween(1, 10) + randomFrom("h", "m"); + String timeInterval = randomIntBetween(2, 10) + randomFrom("h", "m"); DateHistogramGroupConfig dateHistoConfig = - new DateHistogramGroupConfig(timestampField, new DateHistogramInterval(timeInterval)); + new FixedInterval(timestampField, new DateHistogramInterval(timeInterval)); MetricConfig metricConfig = new MetricConfig(valueField, Collections.singletonList("avg")); RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.singletonList(metricConfig)); diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml index 9438878417741..e0b35a64fb479 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml @@ -67,7 +67,12 @@ setup: --- "Test preview transform": + - skip: + reason: date histo interval is deprecated + features: "warnings" - do: + warnings: + - "[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future." data_frame.preview_data_frame_transform: body: > { diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml index 5dda4f3def672..d8ee4926e97d4 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml @@ -319,7 +319,7 @@ setup: "histogram_buckets":{ "date_histogram": { "field": "@timestamp", - "interval": "5m", + "fixed_interval": "5m", "time_zone": "UTC", "min_doc_count": 0 }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml index 1710e51c32bdc..2b8f44be286a1 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml @@ -26,7 +26,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -54,7 +54,7 @@ setup: page_size: 10 groups : date_histogram: - interval: "1h" + calendar_interval: "1h" field: "the_field" time_zone: "UTC" metrics: @@ -107,7 +107,7 @@ setup: page_size: 10 groups : date_histogram: - interval: "1h" + calendar_interval: "1h" field: "the_field" time_zone: "UTC" metrics: @@ -160,7 +160,7 @@ setup: page_size: 10 groups : date_histogram: - interval: "1h" + calendar_interval: "1h" field: "the_field" time_zone: "UTC" metrics: diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml index cd00a6f717b02..c7e9da5aeddc7 100644 --- 
a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml @@ -29,7 +29,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -55,7 +55,7 @@ setup: page_size: 10 groups : date_histogram: - interval: "1h" + calendar_interval: "1h" field: "the_field" time_zone: "UTC" metrics: @@ -119,7 +119,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -145,7 +145,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -172,7 +172,7 @@ setup: page_size: 10 groups : date_histogram: - interval: "1h" + calendar_interval: "1h" field: "the_field" time_zone: "UTC" metrics: @@ -198,7 +198,7 @@ setup: page_size: 10 groups : date_histogram: - interval: "1h" + calendar_interval: "1h" field: "the_field" time_zone: "UTC" metrics: diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml index 3d38f4a371234..42acd41097bf2 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml @@ -47,7 +47,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -74,7 +74,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -98,7 +98,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -121,7 +121,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -133,7 +133,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -157,7 +157,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -182,7 +182,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -206,7 +206,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -218,7 +218,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -233,7 +233,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml index e4b98b9492087..cbed3770ef22c 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml @@ -47,7 +47,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -74,7 +74,7 @@ setup: fields: the_field: - 
agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -98,7 +98,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -121,7 +121,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -133,7 +133,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -158,7 +158,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -181,7 +181,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -205,7 +205,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -229,7 +229,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -254,7 +254,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -266,7 +266,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -280,7 +280,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -308,7 +308,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -332,7 +332,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -357,7 +357,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -369,7 +369,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -383,7 +383,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" @@ -407,7 +407,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -431,7 +431,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -456,7 +456,7 @@ setup: fields: the_field: - agg: "date_histogram" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" value_field: - agg: "min" diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml index 7983778108bd0..7226dcb7e136b 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml @@ -29,7 +29,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -55,7 +55,7 @@ setup: page_size: 10 groups : date_histogram: - interval: "1h" + calendar_interval: "1h" field: "the_field" time_zone: "UTC" metrics: @@ -97,7 +97,7 @@ setup: "groups" : { "date_histogram": { 
"field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -124,7 +124,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -155,7 +155,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -188,7 +188,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -217,7 +217,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ @@ -246,7 +246,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/rollup_search.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/rollup_search.yml index 0f3488d146a00..0e052d33281e2 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/rollup_search.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/rollup_search.yml @@ -28,7 +28,7 @@ setup: "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h" + "calendar_interval": "1h" }, "terms": { "fields": ["partition"] @@ -135,7 +135,34 @@ setup: histo: date_histogram: field: "timestamp" - interval: "1h" + calendar_interval: "1h" + time_zone: "UTC" + + - length: { aggregations.histo.buckets: 4 } + - match: { aggregations.histo.buckets.0.key_as_string: "2017-01-01T05:00:00.000Z" } + - match: { aggregations.histo.buckets.0.doc_count: 1 } + - match: { aggregations.histo.buckets.1.key_as_string: "2017-01-01T06:00:00.000Z" } + - match: { aggregations.histo.buckets.1.doc_count: 2 } + - match: { aggregations.histo.buckets.2.key_as_string: "2017-01-01T07:00:00.000Z" } + - match: { aggregations.histo.buckets.2.doc_count: 10 } + - match: { aggregations.histo.buckets.3.key_as_string: "2017-01-01T08:00:00.000Z" } + - match: { aggregations.histo.buckets.3.doc_count: 20 } + +--- +"Basic Search with rest_total_hits_as_int": + - skip: + version: " - 6.5.99" + reason: rest_total_hits_as_int was introduced in 6.6.0 + - do: + rollup.rollup_search: + index: "foo_rollup" + body: + size: 0 + aggs: + histo: + date_histogram: + field: "timestamp" + calendar_interval: "1h" time_zone: "UTC" - length: { aggregations.histo.buckets: 4 } @@ -160,7 +187,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" format: "yyyy-MM-dd" @@ -218,7 +245,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" aggs: the_max: @@ -254,7 +281,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" aggs: the_max: @@ -291,7 +318,7 @@ setup: "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h" + "calendar_interval": "1h" }, "terms": { "fields": ["partition"] @@ -396,7 +423,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" aggs: the_max: @@ -434,7 +461,7 @@ setup: "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h" + "calendar_interval": "1h" }, "terms": { "fields": ["partition"] @@ -542,7 +569,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" aggs: the_max: @@ -579,7 +606,7 @@ 
setup: "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1d" + "calendar_interval": "1d" }, "terms": { "fields": ["partition"] @@ -686,7 +713,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" aggs: the_max: @@ -718,7 +745,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" - length: { aggregations.histo.buckets: 4 } @@ -761,7 +788,7 @@ setup: "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h" + "calendar_interval": "1h" }, "terms": { "fields": ["partition"] @@ -785,7 +812,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" @@ -806,7 +833,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" - length: { aggregations.histo.buckets: 4 } @@ -849,7 +876,7 @@ setup: "groups" : { "date_histogram": { "field": "timestamp", - "interval": "1h" + "calendar_interval": "1h" }, "terms": { "fields": ["partition"] @@ -878,7 +905,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" --- @@ -984,7 +1011,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "5m" + fixed_interval: "5m" time_zone: "America/Edmonton" aggs: the_max: @@ -1011,7 +1038,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "5m" + fixed_interval: "5m" time_zone: "Canada/Mountain" aggs: the_max: @@ -1137,7 +1164,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "5m" + fixed_interval: "5m" time_zone: "America/Edmonton" aggs: the_max: @@ -1165,7 +1192,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "5m" + fixed_interval: "5m" time_zone: "Canada/Mountain" aggs: the_max: @@ -1197,7 +1224,7 @@ setup: histo: date_histogram: field: "timestamp" - interval: "1h" + calendar_interval: "1h" time_zone: "UTC" aggs: the_max: diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml index fbf9e8519059a..371f7c7207fa3 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml @@ -26,7 +26,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml index 7e8b6b3f61af0..e7b81831c650e 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml @@ -26,7 +26,7 @@ setup: "groups" : { "date_histogram": { "field": "the_field", - "interval": "1h" + "calendar_interval": "1h" } }, "metrics": [ diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 322e97db765ff..8031affd1912f 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -247,6 +247,13 @@ public void testRollupAfterRestart() throws 
Exception { // create the rollup job final Request createRollupJobRequest = new Request("PUT", getRollupEndpoint() + "/job/rollup-job-test"); + String intervalType; + if (getOldClusterVersion().onOrAfter(Version.V_8_0_0)) { // TODO change this after backport + intervalType = "fixed_interval"; + } else { + intervalType = "interval"; + } + createRollupJobRequest.setJsonEntity("{" + "\"index_pattern\":\"rollup-*\"," + "\"rollup_index\":\"results-rollup\"," @@ -255,7 +262,7 @@ public void testRollupAfterRestart() throws Exception { + "\"groups\":{" + " \"date_histogram\":{" + " \"field\":\"timestamp\"," - + " \"interval\":\"5m\"" + + " \"" + intervalType + "\":\"5m\"" + " }" + "}," + "\"metrics\":[" diff --git a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java index a5579ad0aa5d8..604c9a17a5e04 100644 --- a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java +++ b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java @@ -114,7 +114,7 @@ public void testBigRollup() throws Exception { + "\"groups\":{" + " \"date_histogram\":{" + " \"field\":\"timestamp\"," - + " \"interval\":\"5m\"" + + " \"fixed_interval\":\"5m\"" + " }" + "}," + "\"metrics\":[" @@ -158,7 +158,7 @@ public void testBigRollup() throws Exception { " \"date_histo\": {\n" + " \"date_histogram\": {\n" + " \"field\": \"timestamp\",\n" + - " \"interval\": \"60m\",\n" + + " \"fixed_interval\": \"60m\",\n" + " \"format\": \"date_time\"\n" + " },\n" + " \"aggs\": {\n" + diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 8b5d1f1ff72b7..58dac6b8f2510 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -90,6 +90,7 @@ task copyTestNodeKeyMaterial(type: Copy) { for (Version version : bwcVersions.wireCompatible) { String baseName = "v${version}" + Task oldClusterTest = tasks.create(name: "${baseName}#oldClusterTest", type: RestIntegTestTask) { mustRunAfter(precommit) @@ -165,6 +166,7 @@ for (Version version : bwcVersions.wireCompatible) { Task oldClusterTestRunner = tasks.getByName("${baseName}#oldClusterTestRunner") oldClusterTestRunner.configure { systemProperty 'tests.rest.suite', 'old_cluster' + systemProperty 'tests.upgrade_from_version', version.toString().replace('-SNAPSHOT', '') } Closure configureUpgradeCluster = {String name, Task lastRunner, int stopNode, Closure getOtherUnicastHostAddresses -> @@ -221,6 +223,7 @@ for (Version version : bwcVersions.wireCompatible) { oneThirdUpgradedTestRunner.configure { systemProperty 'tests.rest.suite', 'mixed_cluster' systemProperty 'tests.first_round', 'true' + systemProperty 'tests.upgrade_from_version', version.toString().replace('-SNAPSHOT', '') // We only need to run these tests once so we may as well do it when we're two thirds upgraded systemProperty 'tests.rest.blacklist', [ 'mixed_cluster/10_basic/Start scroll in mixed cluster on upgraded node that we will continue after upgrade', @@ -241,6 +244,7 @@ for (Version version : bwcVersions.wireCompatible) { twoThirdsUpgradedTestRunner.configure { systemProperty 'tests.rest.suite', 'mixed_cluster' systemProperty 'tests.first_round', 'false' + systemProperty 'tests.upgrade_from_version', version.toString().replace('-SNAPSHOT', '') finalizedBy "${baseName}#oldClusterTestCluster#node2.stop" } @@ -253,6 +257,7 @@ for (Version version : bwcVersions.wireCompatible) { Task upgradedClusterTestRunner = 
tasks.getByName("${baseName}#upgradedClusterTestRunner") upgradedClusterTestRunner.configure { systemProperty 'tests.rest.suite', 'upgraded_cluster' + systemProperty 'tests.upgrade_from_version', version.toString().replace('-SNAPSHOT', '') /* * Force stopping all the upgraded nodes after the test runner * so they are alive during the test. diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java new file mode 100644 index 0000000000000..03c28c05e616b --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java @@ -0,0 +1,258 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.upgrades; + +import org.elasticsearch.Version; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.xcontent.ObjectPath; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.hamcrest.Matcher; + +import java.io.IOException; +import java.time.Instant; +import java.time.OffsetDateTime; +import java.time.ZoneOffset; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; + + +public class RollupDateHistoUpgradeIT extends AbstractUpgradeTestCase { + private static final Version UPGRADE_FROM_VERSION = + Version.fromString(System.getProperty("tests.upgrade_from_version")); + + public void testDateHistoIntervalUpgrade() throws Exception { + assumeTrue("DateHisto interval changed in 7.1", UPGRADE_FROM_VERSION.before(Version.V_8_0_0)); // TODO change this after backport + switch (CLUSTER_TYPE) { + case OLD: + break; + case MIXED: + Request waitForYellow = new Request("GET", "/_cluster/health"); + waitForYellow.addParameter("wait_for_nodes", "3"); + waitForYellow.addParameter("wait_for_status", "yellow"); + client().performRequest(waitForYellow); + break; + case UPGRADED: + Request waitForGreen = new Request("GET", "/_cluster/health/target,rollup"); + waitForGreen.addParameter("wait_for_nodes", "3"); + waitForGreen.addParameter("wait_for_status", "green"); + // wait for long enough that we give delayed unassigned shards to stop being delayed + waitForGreen.addParameter("timeout", "70s"); + waitForGreen.addParameter("level", "shards"); + client().performRequest(waitForGreen); + break; + default: + throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]"); + } + + OffsetDateTime timestamp = Instant.parse("2018-01-01T00:00:01.000Z").atOffset(ZoneOffset.UTC); + + if (CLUSTER_TYPE == ClusterType.OLD) { + String recoverQuickly = "{\"settings\": {\"index.unassigned.node_left.delayed_timeout\": \"100ms\"}}"; + + Request createTargetIndex = new Request("PUT", "/target"); + createTargetIndex.setJsonEntity(recoverQuickly); + client().performRequest(createTargetIndex); + + final Request indexRequest = new Request("POST", "/target/_doc/1"); + 
indexRequest.setJsonEntity("{\"timestamp\":\"" + timestamp.toString() + "\",\"value\":123}"); + client().performRequest(indexRequest); + + // create the rollup job with an old interval style + final Request createRollupJobRequest = new Request("PUT", "_rollup/job/rollup-id-test"); + createRollupJobRequest.setJsonEntity("{" + + "\"index_pattern\":\"target\"," + + "\"rollup_index\":\"rollup\"," + + "\"cron\":\"*/1 * * * * ?\"," + + "\"page_size\":100," + + "\"groups\":{" + + " \"date_histogram\":{" + + " \"field\":\"timestamp\"," + + " \"interval\":\"5m\"" + + " }," + + "\"histogram\":{" + + " \"fields\": [\"value\"]," + + " \"interval\":1" + + " }," + + "\"terms\":{" + + " \"fields\": [\"value\"]" + + " }" + + "}," + + "\"metrics\":[" + + " {\"field\":\"value\",\"metrics\":[\"min\",\"max\",\"sum\"]}" + + "]" + + "}"); + + Map<String, Object> createRollupJobResponse = entityAsMap(client().performRequest(createRollupJobRequest)); + assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + + Request updateSettings = new Request("PUT", "/rollup/_settings"); + updateSettings.setJsonEntity(recoverQuickly); + client().performRequest(updateSettings); + + // start the rollup job + final Request startRollupJobRequest = new Request("POST", "_rollup/job/rollup-id-test/_start"); + Map<String, Object> startRollupJobResponse = entityAsMap(client().performRequest(startRollupJobRequest)); + assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE)); + + assertRollUpJob("rollup-id-test"); + List<String> ids = getSearchResults(1); + assertThat(ids.toString(), ids, containsInAnyOrder("rollup-id-test$AuaduUZW8tgWmFP87DgzSA")); + } + + if (CLUSTER_TYPE == ClusterType.MIXED && Booleans.parseBoolean(System.getProperty("tests.first_round"))) { + final Request indexRequest = new Request("POST", "/target/_doc/2"); + indexRequest.setJsonEntity("{\"timestamp\":\"" + timestamp.plusDays(1).toString() + "\",\"value\":345}"); + client().performRequest(indexRequest); + + assertRollUpJob("rollup-id-test"); + client().performRequest(new Request("POST", "rollup/_refresh")); + + List<String> ids = getSearchResults(2); + assertThat(ids.toString(), ids, containsInAnyOrder("rollup-id-test$AuaduUZW8tgWmFP87DgzSA", + "rollup-id-test$ehY4NAyVSy8xxUDZrNXXIA")); + } + + if (CLUSTER_TYPE == ClusterType.MIXED && Booleans.parseBoolean(System.getProperty("tests.first_round")) == false) { + final Request indexRequest = new Request("POST", "/target/_doc/3"); + indexRequest.setJsonEntity("{\"timestamp\":\"" + timestamp.plusDays(2).toString() + "\",\"value\":456}"); + client().performRequest(indexRequest); + + assertRollUpJob("rollup-id-test"); + client().performRequest(new Request("POST", "rollup/_refresh")); + + List<String> ids = getSearchResults(3); + assertThat(ids.toString(), ids, containsInAnyOrder("rollup-id-test$AuaduUZW8tgWmFP87DgzSA", + "rollup-id-test$ehY4NAyVSy8xxUDZrNXXIA", "rollup-id-test$60RGDSb92YI5LH4_Fnq_1g")); + + } + + if (CLUSTER_TYPE == ClusterType.UPGRADED) { + final Request indexRequest = new Request("POST", "/target/_doc/4"); + indexRequest.setJsonEntity("{\"timestamp\":\"" + timestamp.plusDays(3).toString() + "\",\"value\":567}"); + client().performRequest(indexRequest); + + assertRollUpJob("rollup-id-test"); + client().performRequest(new Request("POST", "rollup/_refresh")); + + List<String> ids = getSearchResults(4); + assertThat(ids.toString(), ids, containsInAnyOrder("rollup-id-test$AuaduUZW8tgWmFP87DgzSA", + "rollup-id-test$ehY4NAyVSy8xxUDZrNXXIA", "rollup-id-test$60RGDSb92YI5LH4_Fnq_1g", "rollup-id-test$LAKZftDeQwsUtdPixrkkzQ")); + } + +
} + + private List<String> getSearchResults(int expectedCount) throws Exception { + final List<String> collectedIDs = new ArrayList<>(); + assertBusy(() -> { + collectedIDs.clear(); + client().performRequest(new Request("POST", "rollup/_refresh")); + final Request searchRequest = new Request("GET", "rollup/_search"); + try { + Map<String, Object> searchResponse = entityAsMap(client().performRequest(searchRequest)); + assertNotNull(ObjectPath.eval("hits.total.value", searchResponse)); + assertThat(ObjectPath.eval("hits.total.value", searchResponse), equalTo(expectedCount)); + + for (int i = 0; i < expectedCount; i++) { + String id = ObjectPath.eval("hits.hits." + i + "._id", searchResponse); + collectedIDs.add(id); + Map<String, Object> doc = ObjectPath.eval("hits.hits." + i + "._source", searchResponse); + assertNotNull(doc); + } + } catch (IOException e) { + fail(); + } + }); + return collectedIDs; + } + + @SuppressWarnings("unchecked") + private void assertRollUpJob(final String rollupJob) throws Exception { + final Matcher<?> expectedStates = anyOf(equalTo("indexing"), equalTo("started")); + waitForRollUpJob(rollupJob, expectedStates); + + // check that the rollup job is started using the RollUp API + final Request getRollupJobRequest = new Request("GET", "_rollup/job/" + rollupJob); + Map<String, Object> getRollupJobResponse = entityAsMap(client().performRequest(getRollupJobRequest)); + Map<String, Object> job = getJob(getRollupJobResponse, rollupJob); + if (job != null) { + assertThat(ObjectPath.eval("status.job_state", job), expectedStates); + } + + // check that the rollup job is started using the Tasks API + final Request taskRequest = new Request("GET", "_tasks"); + taskRequest.addParameter("detailed", "true"); + taskRequest.addParameter("actions", "xpack/rollup/*"); + Map<String, Object> taskResponse = entityAsMap(client().performRequest(taskRequest)); + Map<String, Object> taskResponseNodes = (Map<String, Object>) taskResponse.get("nodes"); + Map<String, Object> taskResponseNode = (Map<String, Object>) taskResponseNodes.values().iterator().next(); + Map<String, Object> taskResponseTasks = (Map<String, Object>) taskResponseNode.get("tasks"); + Map<String, Object> taskResponseStatus = (Map<String, Object>) taskResponseTasks.values().iterator().next(); + assertThat(ObjectPath.eval("status.job_state", taskResponseStatus), expectedStates); + + // check that the rollup job is started using the Cluster State API + final Request clusterStateRequest = new Request("GET", "_cluster/state/metadata"); + Map<String, Object> clusterStateResponse = entityAsMap(client().performRequest(clusterStateRequest)); + List<Map<String, Object>> rollupJobTasks = ObjectPath.eval("metadata.persistent_tasks.tasks", clusterStateResponse); + + boolean hasRollupTask = false; + for (Map<String, Object> task : rollupJobTasks) { + if (ObjectPath.eval("id", task).equals(rollupJob)) { + hasRollupTask = true; + break; + } + } + if (hasRollupTask == false) { + fail("Expected persistent task for [" + rollupJob + "] but none found."); + } + + } + + private void waitForRollUpJob(final String rollupJob, final Matcher<?> expectedStates) throws Exception { + assertBusy(() -> { + final Request getRollupJobRequest = new Request("GET", "_rollup/job/" + rollupJob); + Response getRollupJobResponse = client().performRequest(getRollupJobRequest); + assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); + + Map<String, Object> job = getJob(getRollupJobResponse, rollupJob); + if (job != null) { + assertThat(ObjectPath.eval("status.job_state", job), expectedStates); + } + }, 30L, TimeUnit.SECONDS); + } + + private static Map<String, Object> getJob(Response response, String targetJobId) throws IOException { + return getJob(ESRestTestCase.entityAsMap(response), targetJobId); + } + +
@SuppressWarnings("unchecked") + private static Map<String, Object> getJob(Map<String, Object> jobsMap, String targetJobId) throws IOException { + + List<Map<String, Object>> jobs = + (List<Map<String, Object>>) XContentMapValues.extractValue("jobs", jobsMap); + + if (jobs == null) { + return null; + } + + for (Map<String, Object> job : jobs) { + String jobId = (String) ((Map<String, Object>) job.get("config")).get("id"); + if (jobId.equals(targetJobId)) { + return job; + } + } + return null; + } +} diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml index 18e9f66603a0e..e9790e69b3cbe 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml @@ -17,7 +17,11 @@ --- "Test old cluster datafeed with aggs": + - skip: + features: "warnings" - do: + warnings: + - '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.' ml.get_datafeeds: datafeed_id: old-cluster-datafeed-with-aggs - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed-with-aggs"} @@ -81,6 +85,8 @@ --- "Put job and datafeed with aggs in mixed cluster": + - skip: + features: "warnings" - do: ml.put_job: @@ -103,6 +109,8 @@ } - do: + warnings: + - '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.' ml.put_datafeed: datafeed_id: mixed-cluster-datafeed-with-aggs body: > diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml index 597540d36c4ec..bce9c25c08c03 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml @@ -46,7 +46,10 @@ - is_false: datafeeds.0.node --- -"Put job and datafeed with aggs in old cluster": +"Put job and datafeed with aggs in old cluster - pre-deprecated interval": + - skip: + version: "8.0.0 - " #TODO change this after backport + reason: calendar_interval introduced in 7.1.0 - do: ml.put_job: @@ -111,3 +114,76 @@ datafeed_id: old-cluster-datafeed-with-aggs - match: { datafeeds.0.state: stopped} - is_false: datafeeds.0.node + +--- +"Put job and datafeed with aggs in old cluster - deprecated interval with warning": + - skip: + version: " - 7.99.99" #TODO change this after backport + reason: calendar_interval introduced in 7.1.0 + features: warnings + + - do: + ml.put_job: + job_id: old-cluster-datafeed-job-with-aggs + body: > + { + "description":"Cluster upgrade", + "analysis_config" : { + "bucket_span": "60s", + "summary_count_field_name": "doc_count", + "detectors" :[{"function":"count"}] + }, + "analysis_limits" : { + "model_memory_limit": "50mb" + }, + "data_description" : { + "format":"xcontent", + "time_field":"time" + } + } + - match: { job_id: old-cluster-datafeed-job-with-aggs } + + - do: + warnings: + - '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.'
+ ml.put_datafeed: + datafeed_id: old-cluster-datafeed-with-aggs + body: > + { + "job_id":"old-cluster-datafeed-job-with-aggs", + "indices":["airline-data"], + "scroll_size": 2000, + "aggregations": { + "buckets": { + "date_histogram": { + "field": "time", + "interval": "30s", + "time_zone": "UTC" + }, + "aggregations": { + "time": { + "max": {"field": "time"} + }, + "airline": { + "terms": { + "field": "airline", + "size": 100 + }, + "aggregations": { + "responsetime": { + "avg": { + "field": "responsetime" + } + } + } + } + } + } + } + } + + - do: + ml.get_datafeed_stats: + datafeed_id: old-cluster-datafeed-with-aggs + - match: { datafeeds.0.state: stopped} + - is_false: datafeeds.0.node diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml index f2dbb2e80dc8e..089c689d900fa 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml @@ -105,6 +105,8 @@ setup: --- "Test old and mixed cluster datafeeds with aggs": + - skip: + features: "warnings" - do: indices.create: index: airline-data @@ -115,6 +117,8 @@ setup: type: date - do: + warnings: + - '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.' ml.get_datafeeds: datafeed_id: old-cluster-datafeed-with-aggs - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed-with-aggs"} @@ -131,6 +135,8 @@ setup: - is_false: datafeeds.0.node - do: + warnings: + - '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.' ml.get_datafeeds: datafeed_id: mixed-cluster-datafeed-with-aggs - match: { datafeeds.0.datafeed_id: "mixed-cluster-datafeed-with-aggs"} @@ -151,6 +157,8 @@ setup: job_id: old-cluster-datafeed-job-with-aggs - do: + warnings: + - '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.' ml.start_datafeed: datafeed_id: old-cluster-datafeed-with-aggs start: 0 @@ -177,6 +185,8 @@ setup: job_id: mixed-cluster-datafeed-job-with-aggs - do: + warnings: + - '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.' 
ml.start_datafeed: datafeed_id: mixed-cluster-datafeed-with-aggs start: 0 From fe5a9fbc2d1196cb9226c215352c34f38771e63a Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Mon, 6 May 2019 17:30:18 -0400 Subject: [PATCH 2/7] Update version skips --- .../rest-api-spec/test/search.aggregation/10_histogram.yml | 5 +++-- .../rest-api-spec/test/search.aggregation/230_composite.yml | 4 ++-- .../rest-api-spec/test/search.aggregation/250_moving_fn.yml | 4 ++-- .../rest-api-spec/test/search.aggregation/80_typed_keys.yml | 4 +--- .../resources/rest-api-spec/test/search/240_date_nanos.yml | 2 +- .../aggregations/bucket/histogram/DateIntervalWrapper.java | 4 ++-- .../elasticsearch/xpack/restart/FullClusterRestartIT.java | 2 +- .../org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java | 2 +- 8 files changed, 13 insertions(+), 14 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml index f07ac96e67e98..3c4dba98ab943 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml @@ -143,7 +143,8 @@ setup: "Deprecated _time order": - skip: - reason: _time order deprecated in 6.0, replaced by _key + version: " - 7.1.99" + reason: _time order deprecated in 6.0, replaced by _key. Calendar_interval added in 7.2 features: "warnings" - do: @@ -176,7 +177,7 @@ setup: - do: search: rest_total_hits_as_int: true - body: { "aggs" : { "histo" : { "date_histogram" : { "field" : "date", "interval" : "month", "order" : { "_time" : "desc" } } } } } + body: { "aggs" : { "histo" : { "date_histogram" : { "field" : "date", "calendar_interval" : "month", "order" : { "_time" : "desc" } } } } } warnings: - "Deprecated aggregation order key [_time] used, replaced by [_key]" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml index c643be01613b6..88fb807ba2e5f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml @@ -264,7 +264,7 @@ setup: --- "Composite aggregation with format": - skip: - version: " - 7.1.99" #TODO change this after backport + version: " - 7.1.99" reason: calendar_interval introduced in 7.2.0 features: warnings @@ -330,7 +330,7 @@ setup: --- "Composite aggregation with format and calendar_interval": - skip: - version: " - 7.1.99" #TODO change this after backport + version: " - 7.1.99" reason: calendar_interval introduced in 7.2.0 - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml index 433407f90575a..14e626b94e79a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml @@ -7,7 +7,7 @@ setup: "Bad window": - skip: - version: " - 7.99.0" #TODO change this after backport + version: " - 7.1.99" reason: "calendar_interval added in 7.2" - do: @@ -35,7 +35,7 @@ setup: "Bad window deprecated interval": - skip: - version: " - 7.99.0" #TODO change this after backport + version: " - 
7.1.99" reason: "interval deprecation added in 7.2" features: "warnings" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml index 01309274f05b4..d041432556430 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml @@ -206,11 +206,9 @@ setup: --- "Test typed keys parameter for date_histogram aggregation and max_bucket pipeline aggregation": - skip: - version: " - 7.1.99" #TODO change this after backport + version: " - 7.1.99" reason: "calendar_interval added in 7.2" - do: - warnings: - - 'The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.' search: rest_total_hits_as_int: true typed_keys: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml index a775e51a712c2..6aaece2f9c58d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml @@ -125,7 +125,7 @@ setup: --- "date histogram aggregation with date and date_nanos mapping": - skip: - version: " - 7.99.99" #TODO change this after backport + version: " - 7.1.99" reason: calendar_interval introduced in 7.2.0 - do: diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java index b86989fce168d..c7759be99e62f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java @@ -113,7 +113,7 @@ public static void declareIntervalFields(Object public DateIntervalWrapper() {} public DateIntervalWrapper(StreamInput in) throws IOException { - if (in.getVersion().before(Version.V_8_0_0)) { // TODO change this after backport + if (in.getVersion().before(Version.V_7_2_0)) { long interval = in.readLong(); DateHistogramInterval histoInterval = in.readOptionalWriteable(DateHistogramInterval::new); @@ -370,7 +370,7 @@ public boolean isEmpty() { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getVersion().before(Version.V_8_0_0)) { // TODO change this after backport + if (out.getVersion().before(Version.V_7_2_0)) { if (intervalType.equals(IntervalTypeEnum.LEGACY_INTERVAL)) { out.writeLong(TimeValue.parseTimeValue(dateHistogramInterval.toString(), DateHistogramAggregationBuilder.NAME + ".innerWriteTo").getMillis()); diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 8031affd1912f..d7355269a1156 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -248,7 +248,7 @@ public void testRollupAfterRestart() throws Exception { final Request createRollupJobRequest = new Request("PUT", getRollupEndpoint() + "/job/rollup-job-test"); String 
intervalType; - if (getOldClusterVersion().onOrAfter(Version.V_8_0_0)) { // TODO change this after backport + if (getOldClusterVersion().onOrAfter(Version.V_7_2_0)) { intervalType = "fixed_interval"; } else { intervalType = "interval"; diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java index 03c28c05e616b..08ad9f09d599c 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java @@ -34,7 +34,7 @@ public class RollupDateHistoUpgradeIT extends AbstractUpgradeTestCase { Version.fromString(System.getProperty("tests.upgrade_from_version")); public void testDateHistoIntervalUpgrade() throws Exception { - assumeTrue("DateHisto interval changed in 7.1", UPGRADE_FROM_VERSION.before(Version.V_8_0_0)); // TODO change this after backport + assumeTrue("DateHisto interval changed in 7.2", UPGRADE_FROM_VERSION.before(Version.V_7_2_0)); switch (CLUSTER_TYPE) { case OLD: break; From 2f1dc24f64c069ae588dac25c0ad33dff544352e Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Tue, 7 May 2019 15:49:29 -0400 Subject: [PATCH 3/7] Update movavg doc tests --- .../pipeline/movavg-aggregation.asciidoc | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc index 06641391ced32..7c80e4797ba4f 100644 --- a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc @@ -62,7 +62,7 @@ POST /_search "my_date_histo":{ <1> "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -165,7 +165,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -219,7 +219,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -279,7 +279,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -338,7 +338,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -427,7 +427,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -488,7 +488,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -538,7 +538,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ @@ -617,7 +617,7 @@ POST /_search "my_date_histo":{ "date_histogram":{ "field":"date", - "interval":"1M" + "calendar_interval":"1M" }, "aggs":{ "the_sum":{ From 07876582f90ec0a7e3e3b92973f4c613dcf768af Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Wed, 8 May 2019 15:43:30 -0400 Subject: [PATCH 4/7] Tweak rolling restart test for 6x, 7x --- .../upgrades/RollupDateHistoUpgradeIT.java | 80 ++++++++++++++----- 1 file changed, 62 insertions(+), 18 deletions(-) diff --git 
a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java index 08ad9f09d599c..7c594b7eef500 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java @@ -7,6 +7,7 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.xcontent.ObjectPath; @@ -34,7 +35,6 @@ public class RollupDateHistoUpgradeIT extends AbstractUpgradeTestCase { Version.fromString(System.getProperty("tests.upgrade_from_version")); public void testDateHistoIntervalUpgrade() throws Exception { - assumeTrue("DateHisto interval changed in 7.2", UPGRADE_FROM_VERSION.before(Version.V_7_2_0)); switch (CLUSTER_TYPE) { case OLD: break; @@ -60,18 +60,21 @@ public void testDateHistoIntervalUpgrade() throws Exception { OffsetDateTime timestamp = Instant.parse("2018-01-01T00:00:01.000Z").atOffset(ZoneOffset.UTC); if (CLUSTER_TYPE == ClusterType.OLD) { - String recoverQuickly = "{\"settings\": {\"index.unassigned.node_left.delayed_timeout\": \"100ms\"}}"; + String rollupEndpoint = UPGRADE_FROM_VERSION.before(Version.V_7_0_0) ? "_xpack/rollup" : "_rollup"; + + String settings = "{\"settings\": {\"index.unassigned.node_left.delayed_timeout\": \"100ms\", \"number_of_shards\": 1}}"; Request createTargetIndex = new Request("PUT", "/target"); - createTargetIndex.setJsonEntity(recoverQuickly); + createTargetIndex.setJsonEntity(settings); client().performRequest(createTargetIndex); final Request indexRequest = new Request("POST", "/target/_doc/1"); indexRequest.setJsonEntity("{\"timestamp\":\"" + timestamp.toString() + "\",\"value\":123}"); client().performRequest(indexRequest); + client().performRequest(new Request("POST", "target/_refresh")); // create the rollup job with an old interval style - final Request createRollupJobRequest = new Request("PUT", "_rollup/job/rollup-id-test"); + final Request createRollupJobRequest = new Request("PUT", rollupEndpoint + "/job/rollup-id-test"); createRollupJobRequest.setJsonEntity("{" + "\"index_pattern\":\"target\"," + "\"rollup_index\":\"rollup\"," @@ -94,20 +97,28 @@ public void testDateHistoIntervalUpgrade() throws Exception { + " {\"field\":\"value\",\"metrics\":[\"min\",\"max\",\"sum\"]}" + "]" + "}"); + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + options.setWarningsHandler(warnings -> { + warnings.remove("the default number of shards will change from [5] to [1] in 7.0.0; if you wish to continue using " + + "the default of [5] shards, you must manage this on the create index request or with an index template"); + return warnings.size() > 0; + }); + createRollupJobRequest.setOptions(options); Map<String, Object> createRollupJobResponse = entityAsMap(client().performRequest(createRollupJobRequest)); assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + String recoverQuickly = "{\"settings\": {\"index.unassigned.node_left.delayed_timeout\": \"100ms\"}}"; Request updateSettings = new Request("PUT", "/rollup/_settings"); updateSettings.setJsonEntity(recoverQuickly); client().performRequest(updateSettings); // start the rollup job - final Request startRollupJobRequest = new Request("POST",
"_rollup/job/rollup-id-test/_start"); + final Request startRollupJobRequest = new Request("POST", rollupEndpoint + "/job/rollup-id-test/_start"); Map<String, Object> startRollupJobResponse = entityAsMap(client().performRequest(startRollupJobRequest)); assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE)); - assertRollUpJob("rollup-id-test"); + assertRollUpJob("rollup-id-test", rollupEndpoint); List<String> ids = getSearchResults(1); assertThat(ids.toString(), ids, containsInAnyOrder("rollup-id-test$AuaduUZW8tgWmFP87DgzSA")); } @@ -116,9 +127,9 @@ public void testDateHistoIntervalUpgrade() throws Exception { final Request indexRequest = new Request("POST", "/target/_doc/2"); indexRequest.setJsonEntity("{\"timestamp\":\"" + timestamp.plusDays(1).toString() + "\",\"value\":345}"); client().performRequest(indexRequest); + client().performRequest(new Request("POST", "target/_refresh")); - assertRollUpJob("rollup-id-test"); - client().performRequest(new Request("POST", "rollup/_refresh")); + assertRollUpJob("rollup-id-test", "_xpack/rollup"); List<String> ids = getSearchResults(2); assertThat(ids.toString(), ids, containsInAnyOrder("rollup-id-test$AuaduUZW8tgWmFP87DgzSA", + "rollup-id-test$ehY4NAyVSy8xxUDZrNXXIA")); @@ -130,7 +141,9 @@ public void testDateHistoIntervalUpgrade() throws Exception { indexRequest.setJsonEntity("{\"timestamp\":\"" + timestamp.plusDays(2).toString() + "\",\"value\":456}"); client().performRequest(indexRequest); - assertRollUpJob("rollup-id-test"); + client().performRequest(new Request("POST", "target/_refresh")); + + assertRollUpJob("rollup-id-test", "_xpack/rollup"); client().performRequest(new Request("POST", "rollup/_refresh")); List<String> ids = getSearchResults(3); @@ -143,9 +156,9 @@ public void testDateHistoIntervalUpgrade() throws Exception { final Request indexRequest = new Request("POST", "/target/_doc/4"); indexRequest.setJsonEntity("{\"timestamp\":\"" + timestamp.plusDays(3).toString() + "\",\"value\":567}"); client().performRequest(indexRequest); + client().performRequest(new Request("POST", "target/_refresh")); - assertRollUpJob("rollup-id-test"); - client().performRequest(new Request("POST", "rollup/_refresh")); + assertRollUpJob("rollup-id-test", "_rollup"); List<String> ids = getSearchResults(4); assertThat(ids.toString(), ids, containsInAnyOrder("rollup-id-test$AuaduUZW8tgWmFP87DgzSA", @@ -156,14 +169,22 @@ public void testDateHistoIntervalUpgrade() throws Exception { private List<String> getSearchResults(int expectedCount) throws Exception { final List<String> collectedIDs = new ArrayList<>(); + assertBusy(() -> { collectedIDs.clear(); client().performRequest(new Request("POST", "rollup/_refresh")); final Request searchRequest = new Request("GET", "rollup/_search"); try { Map<String, Object> searchResponse = entityAsMap(client().performRequest(searchRequest)); - assertNotNull(ObjectPath.eval("hits.total.value", searchResponse)); - assertThat(ObjectPath.eval("hits.total.value", searchResponse), equalTo(expectedCount)); + logger.error(searchResponse); + + Object hits = ObjectPath.eval("hits.total", searchResponse); + assertNotNull(hits); + if (hits instanceof Number) { + assertThat(ObjectPath.eval("hits.total", searchResponse), equalTo(expectedCount)); + } else { + assertThat(ObjectPath.eval("hits.total.value", searchResponse), equalTo(expectedCount)); + } for (int i = 0; i < expectedCount; i++) { String id = ObjectPath.eval("hits.hits."
+ i + "._id", searchResponse); @@ -179,12 +200,23 @@ private List<String> getSearchResults(int expectedCount) throws Exception { } @SuppressWarnings("unchecked") - private void assertRollUpJob(final String rollupJob) throws Exception { + private void assertRollUpJob(final String rollupJob, String endpoint) throws Exception { final Matcher<?> expectedStates = anyOf(equalTo("indexing"), equalTo("started")); - waitForRollUpJob(rollupJob, expectedStates); + waitForRollUpJob(rollupJob, expectedStates, endpoint); // check that the rollup job is started using the RollUp API - final Request getRollupJobRequest = new Request("GET", "_rollup/job/" + rollupJob); + final Request getRollupJobRequest = new Request("GET", endpoint + "/job/" + rollupJob); + // Hard to know which node we are talking to, so just remove this deprecation warning if we're hitting + // the old endpoint + if (endpoint.equals("_xpack/rollup")) { + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + options.setWarningsHandler(warnings -> { + warnings.remove("[GET /_xpack/rollup/job/{id}/] is deprecated! Use [GET /_rollup/job/{id}] instead."); + return warnings.size() > 0; + }); + getRollupJobRequest.setOptions(options); + } + Map<String, Object> getRollupJobResponse = entityAsMap(client().performRequest(getRollupJobRequest)); Map<String, Object> job = getJob(getRollupJobResponse, rollupJob); if (job != null) { @@ -220,9 +252,21 @@ private void assertRollUpJob(final String rollupJob) throws Exception { } - private void waitForRollUpJob(final String rollupJob, final Matcher<?> expectedStates) throws Exception { + private void waitForRollUpJob(final String rollupJob, final Matcher<?> expectedStates, String endpoint) throws Exception { assertBusy(() -> { - final Request getRollupJobRequest = new Request("GET", "_rollup/job/" + rollupJob); + final Request getRollupJobRequest = new Request("GET", endpoint + "/job/" + rollupJob); + + // Hard to know which node we are talking to, so just remove this deprecation warning if we're hitting + // the old endpoint + if (endpoint.equals("_xpack/rollup")) { + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + options.setWarningsHandler(warnings -> { + logger.error(warnings); + warnings.remove("[GET /_xpack/rollup/job/{id}/] is deprecated! Use [GET /_rollup/job/{id}] instead."); + return warnings.size() > 0; + }); + getRollupJobRequest.setOptions(options); + } Response getRollupJobResponse = client().performRequest(getRollupJobRequest); assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); From b8e622d906d52f602d27507953161a4ece3eb342 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Wed, 8 May 2019 16:42:03 -0400 Subject: [PATCH 5/7] Throw exception if legacy interval cannot be parsed Due to the fallthrough logic, DateIntervalWrapper assumed that an otherwise unparsable interval was a legacy fixed millis interval. This could then NPE if the interval was just illegal ("foobar"). This commit correctly checks if the legacy millis parsing fails too, and throws an IllegalArgumentException at that point signaling the provided interval is bad.
--- .../bucket/histogram/DateIntervalWrapper.java | 12 ++++++++---- .../histogram/DateHistogramAggregatorTests.java | 10 ++++++++++ 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java index c7759be99e62f..229fa0d15bb30 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java @@ -288,11 +288,15 @@ public Rounding createRounding(ZoneId timeZone) { } else { // We're not sure what the interval was originally (legacy) so use old behavior of assuming // calendar first, then fixed. Required because fixed/cal overlap in places ("1h") - DateTimeUnit intervalAsUnit = tryIntervalAsCalendarUnit(); - if (intervalAsUnit != null) { - tzRoundingBuilder = Rounding.builder(tryIntervalAsCalendarUnit()); + DateTimeUnit calInterval = tryIntervalAsCalendarUnit(); + TimeValue fixedInterval = tryIntervalAsFixedUnit(); + if (calInterval != null) { + tzRoundingBuilder = Rounding.builder(calInterval); + } else if (fixedInterval != null) { + tzRoundingBuilder = Rounding.builder(fixedInterval); } else { - tzRoundingBuilder = Rounding.builder(tryIntervalAsFixedUnit()); + // If we get here we have exhausted our options and are not able to parse this interval + throw new IllegalArgumentException("Unable to parse interval [" + dateHistogramInterval + "]"); } } if (timeZone != null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java index f671b21eb5e9b..17581b9458413 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java @@ -1097,6 +1097,16 @@ public void testLegacyThenNew() throws IOException { assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } + public void testIllegalInterval() throws IOException { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), + Collections.emptyList(), + aggregation -> aggregation.dateHistogramInterval(new DateHistogramInterval("foobar")).field(DATE_FIELD), + histogram -> {} + )); + assertThat(e.getMessage(), equalTo("Unable to parse interval [foobar]")); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); + } + private void testSearchCase(Query query, List<String> dataset, Consumer<DateHistogramAggregationBuilder> configure, Consumer<InternalDateHistogram> verify) throws IOException { From 7049882503717a1802dd351b978f5b3a654d1389 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Thu, 9 May 2019 17:38:25 -0400 Subject: [PATCH 6/7] Fix max_bucket test by disallowing partial results (#41959) The Max Bucket test can potentially return a partial response, where one of the shards succeeds but another fails due to the max_bucket setting. In the case of a partial failure, the status code is 200 OK since some results were returned (with failures listed in the body). This makes the YAML test fail, because it expects a 4xx/5xx response when catching exception messages. We need to disallow partial results so that the entire query fails and we can check for the max_bucket failure.
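Expressed against the low-level REST client, the fix is roughly the following sketch (the index name and the too-many-buckets aggregation body are illustrative, not the literal YAML fixtures):

    import org.elasticsearch.client.Request;

    Request search = new Request("POST", "/test/_search");
    // Turn any shard-level failure into a failed request (4xx/5xx) instead of
    // a 200 OK response that merely lists the failures in the body.
    search.addParameter("allow_partial_search_results", "false");
    search.setJsonEntity("{\"size\":0,\"aggregations\":{\"histo\":"
        + "{\"date_histogram\":{\"field\":\"date\",\"fixed_interval\":\"1ms\"}}}}");
    // client.performRequest(search) now surfaces the max_buckets violation as a
    // ResponseException, which the YAML test's `catch` clause can match.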
--- .../rest-api-spec/test/search.aggregation/240_max_buckets.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml index 981bafb6538b3..1b23eea01b75b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml @@ -89,6 +89,7 @@ setup: catch: /.*Trying to create too many buckets.*/ search: rest_total_hits_as_int: true + allow_partial_search_results: false index: test body: aggregations: @@ -106,6 +107,7 @@ setup: catch: /.*Trying to create too many buckets.*/ search: rest_total_hits_as_int: true + allow_partial_search_results: false index: test body: aggregations: From 9e56889a909e25ba7d646179569378aafa38d831 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Fri, 10 May 2019 15:27:41 -0400 Subject: [PATCH 7/7] Add documentation for calendar/fixed intervals (#41919) The original PR missed documentation for the new calendar/fixed intervals. This adds the missing documentation.
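The documentation below covers the REST syntax; for Java client users the equivalent migration is sketched here (aggregation and field names are illustrative):

    import org.elasticsearch.search.aggregations.AggregationBuilders;
    import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

    // Deprecated dual-purpose interval: "1d" silently meant calendar time,
    // while "2d" silently meant fixed time.
    AggregationBuilders.dateHistogram("sales_over_time")
        .field("date")
        .dateHistogramInterval(new DateHistogramInterval("1d"));

    // Explicit calendar day (DST-aware; a day may be 23 or 25 hours long):
    AggregationBuilders.dateHistogram("sales_over_time")
        .field("date")
        .calendarInterval(new DateHistogramInterval("1d"));

    // Explicit fixed day (always exactly 24h; multiples such as "2d" are allowed):
    AggregationBuilders.dateHistogram("sales_over_time")
        .field("date")
        .fixedInterval(new DateHistogramInterval("1d"));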
--- .../bucket/datehistogram-aggregation.asciidoc | 284 ++++++++++++------ 1 file changed, 197 insertions(+), 87 deletions(-) diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc index 2ee40b24a8548..2ee9025b6ded8 100644 --- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -10,122 +10,194 @@ that here the interval can be specified using date/time expressions. Time-based data requires special support because time-based intervals are not always a fixed length. -==== Setting intervals - -There seems to be no limit to the creativity we humans apply to setting our -clocks and calendars. We've invented leap years and leap seconds, standard and -daylight savings times, and timezone offsets of 30 or 45 minutes rather than a -full hour. While these creations help keep us in sync with the cosmos and our -environment, they can make specifying time intervals accurately a real challenge. -The only universal truth our researchers have yet to disprove is that a -millisecond is always the same duration, and a second is always 1000 milliseconds. -Beyond that, things get complicated. - -Generally speaking, when you specify a single time unit, such as 1 hour or 1 day, you -are working with a _calendar interval_, but multiples, such as 6 hours or 3 days, are -_fixed-length intervals_. - -For example, a specification of 1 day (1d) from now is a calendar interval that -means "at -this exact time tomorrow" no matter the length of the day. A change to or from -daylight savings time that results in a 23 or 25 hour day is compensated for and the -specification of "this exact time tomorrow" is maintained. But if you specify 2 or -more days, each day must be of the same fixed duration (24 hours). In this case, if -the specified interval includes the change to or from daylight savings time, the -interval will end an hour sooner or later than you expect. - -There are similar differences to consider when you specify single versus multiple minutes or hours. Multiple time periods longer than a day are not supported. - -Here are the valid time specifications and their meanings: +==== Calendar and Fixed intervals -milliseconds (ms) :: -Fixed length interval; supports multiples. +When configuring a date histogram aggregation, the interval can be specified +in two ways: calendar-aware time intervals and fixed time intervals. -seconds (s) :: -1000 milliseconds; fixed length interval (except for the last second of a -minute that contains a leap-second, which is 2000ms long); supports multiples. +Calendar-aware intervals understand that daylight savings changes the length +of specific days, months have different numbers of days, and leap seconds can +be tacked onto a particular year. -minutes (m) :: +Fixed intervals are, by contrast, always multiples of SI units and do not change +based on calendaring context. + +[NOTE] +.Combined `interval` field is deprecated +================================== +deprecated[7.2, `interval` field is deprecated] Historically both calendar and fixed +intervals were configured in a single `interval` field, which led to confusing +semantics. Specifying `1d` would be treated as a calendar-aware interval, +whereas `2d` would be interpreted as fixed time. To get "one day" of fixed time, +the user would need to specify the next smaller unit (in this case, `24h`). + +This combined behavior was often unknown to users, and even users who knew about it +found it difficult to use and confusing. + +This behavior has been deprecated in favor of two new, explicit fields: `calendar_interval` +and `fixed_interval`. + +By forcing a choice between calendar and fixed intervals up front, the semantics of the +interval are immediately clear to the user and there is no ambiguity. The old `interval` +field will be removed in the future. +================================== + +===== Calendar Intervals + +Calendar-aware intervals are configured with the `calendar_interval` parameter. +Calendar intervals can only be specified in "singular" quantities of the unit +(`1d`, `1M`, etc.). Multiples, such as `2d`, are not supported and will throw an exception. + +The accepted units for calendar intervals are: + +minute (`m`, `1m`) :: All minutes begin at 00 seconds. -* One minute (1m) is the interval between 00 seconds of the first minute and 00 +One minute is the interval between 00 seconds of the first minute and 00 seconds of the following minute in the specified timezone, compensating for any -intervening leap seconds, so that the number of minutes and seconds past the -hour is the same at the start and end. -* Multiple minutes (__n__m) are intervals of exactly 60x1000=60,000 milliseconds -each. +intervening leap seconds, so that the number of minutes and seconds past the +hour is the same at the start and end. -hours (h) :: +hour (`h`, `1h`) :: All hours begin at 00 minutes and 00 seconds. -* One hour (1h) is the interval between 00:00 minutes of the first hour and 00:00 +One hour (1h) is the interval between 00:00 minutes of the first hour and 00:00 minutes of the following hour in the specified timezone, compensating for any intervening leap seconds, so that the number of minutes and seconds past the hour -is the same at the start and end. -* Multiple hours (__n__h) are intervals of exactly 60x60x1000=3,600,000 milliseconds -each. +is the same at the start and end.
-days (d) :: + +day (`d`, `1d`) :: All days begin at the earliest possible time, which is usually 00:00:00 (midnight). -* One day (1d) is the interval between the start of the day and the start of +One day (1d) is the interval between the start of the day and the start of the following day in the specified timezone, compensating for any intervening time changes. -* Multiple days (__n__d) are intervals of exactly 24x60x60x1000=86,400,000 -milliseconds each. -weeks (w) :: +week (`w`, `1w`) :: -* One week (1w) is the interval between the start day_of_week:hour:minute:second -and the same day of the week and time of the following week in the specified +One week is the interval between the start day_of_week:hour:minute:second +and the same day of the week and time of the following week in the specified timezone. -* Multiple weeks (__n__w) are not supported. -months (M) :: +month (`M`, `1M`) :: -* One month (1M) is the interval between the start day of the month and time of +One month is the interval between the start day of the month and time of day and the same day of the month and time of the following month in the specified timezone, so that the day of the month and time of day are the same at the start and end. -* Multiple months (__n__M) are not supported. -quarters (q) :: +quarter (`q`, `1q`) :: -* One quarter (1q) is the interval between the start day of the month and +One quarter (1q) is the interval between the start day of the month and time of day and the same day of the month and time of day three months later, so that the day of the month and time of day are the same at the start and end. + -* Multiple quarters (__n__q) are not supported. -years (y) :: +year (`y`, `1y`) :: -* One year (1y) is the interval between the start day of the month and time of -day and the same day of the month and time of day the following year in the +One year (1y) is the interval between the start day of the month and time of +day and the same day of the month and time of day the following year in the specified timezone, so that the date and time are the same at the start and end. + -* Multiple years (__n__y) are not supported. -NOTE: -In all cases, when the specified end time does not exist, the actual end time is -the closest available time after the specified end. +===== Calendar Interval Examples +As an example, here is an aggregation requesting bucket intervals of a month in calendar time: -Widely distributed applications must also consider vagaries such as countries that -start and stop daylight savings time at 12:01 A.M., so end up with one minute of -Sunday followed by an additional 59 minutes of Saturday once a year, and countries -that decide to move across the international date line. Situations like -that can make irregular timezone offsets seem easy. +[source,js] +-------------------------------------------------- +POST /sales/_search?size=0 +{ + "aggs" : { + "sales_over_time" : { + "date_histogram" : { + "field" : "date", + "calendar_interval" : "month" + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] -As always, rigorous testing, especially around time-change events, will ensure -that your time interval specification is -what you intend it to be. +If you attempt to use multiples of calendar units, the aggregation will fail because only +singular calendar units are supported: -WARNING: -To avoid unexpected results, all connected servers and clients must sync to a -reliable network time service.
+[source,js] +-------------------------------------------------- +POST /sales/_search?size=0 +{ + "aggs" : { + "sales_over_time" : { + "date_histogram" : { + "field" : "date", + "calendar_interval" : "2d" + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] +// TEST[catch:bad_request] -==== Examples +[source,js] +-------------------------------------------------- +{ + "error" : { + "root_cause" : [...], + "type" : "x_content_parse_exception", + "reason" : "[1:82] [date_histogram] failed to parse field [calendar_interval]", + "caused_by" : { + "type" : "illegal_argument_exception", + "reason" : "The supplied interval [2d] could not be parsed as a calendar interval.", + "stack_trace" : "java.lang.IllegalArgumentException: The supplied interval [2d] could not be parsed as a calendar interval." + } + } +} + +-------------------------------------------------- +// NOTCONSOLE + +===== Fixed Intervals + +Fixed intervals are configured with the `fixed_interval` parameter. + +In contrast to calendar-aware intervals, fixed intervals are a fixed number of SI +units and never deviate, regardless of where they fall on the calendar. One second +is always composed of 1000ms. This allows fixed intervals to be specified in +any multiple of the supported units. + +However, it means fixed intervals cannot express other units such as months, +since the duration of a month is not a fixed quantity. Attempting to specify +a calendar interval like month or quarter will throw an exception. + +The accepted units for fixed intervals are: + +milliseconds (ms) :: + +seconds (s) :: +Defined as 1000 milliseconds each + +minutes (m) :: +All minutes begin at 00 seconds. -Requesting bucket intervals of a month. +Defined as 60 seconds each (60,000 milliseconds) + +hours (h) :: +All hours begin at 00 minutes and 00 seconds. +Defined as 60 minutes each (3,600,000 milliseconds) + +days (d) :: +All days begin at the earliest possible time, which is usually 00:00:00 +(midnight). + +Defined as 24 hours (86,400,000 milliseconds) + +===== Fixed Interval Examples + +If we try to recreate the "month" `calendar_interval` from earlier, we can approximate that with +30 fixed days: [source,js] -------------------------------------------------- @@ -135,7 +207,7 @@ POST /sales/_search?size=0 "sales_over_time" : { "date_histogram" : { "field" : "date", - "calendar_interval" : "month" + "fixed_interval" : "30d" } } } @@ -144,11 +216,7 @@ POST /sales/_search?size=0 // CONSOLE // TEST[setup:sales] -You can also specify time values using abbreviations supported by -<> parsing. -Note that fractional time values are not supported, but you can address this by -shifting to another -time unit (e.g., `1.5h` could instead be specified as `90m`). 
+But if we try to use a calendar unit that is not supported, such as weeks, we'll get an exception: [source,js] -------------------------------------------------- POST /sales/_search?size=0 { "aggs" : { "sales_over_time" : { "date_histogram" : { "field" : "date", - "fixed_interval" : "90m" + "fixed_interval" : "2w" } } } @@ -166,6 +234,50 @@ -------------------------------------------------- // CONSOLE // TEST[setup:sales] +// TEST[catch:bad_request] + +[source,js] +-------------------------------------------------- +{ + "error" : { + "root_cause" : [...], + "type" : "x_content_parse_exception", + "reason" : "[1:82] [date_histogram] failed to parse field [fixed_interval]", + "caused_by" : { + "type" : "illegal_argument_exception", + "reason" : "failed to parse setting [date_histogram.fixedInterval] with value [2w] as a time value: unit is missing or unrecognized", + "stack_trace" : "java.lang.IllegalArgumentException: failed to parse setting [date_histogram.fixedInterval] with value [2w] as a time value: unit is missing or unrecognized" + } + } +} + +-------------------------------------------------- +// NOTCONSOLE + +===== Notes + +In all cases, when the specified end time does not exist, the actual end time is +the closest available time after the specified end. + +Widely distributed applications must also consider vagaries such as countries that +start and stop daylight savings time at 12:01 A.M., so end up with one minute of +Sunday followed by an additional 59 minutes of Saturday once a year, and countries +that decide to move across the international date line. Situations like +that can make irregular timezone offsets seem easy. + +As always, rigorous testing, especially around time-change events, will ensure +that your time interval specification is +what you intend it to be. + +WARNING: +To avoid unexpected results, all connected servers and clients must sync to a +reliable network time service. + +NOTE: Fractional time values are not supported, but you can address this by +shifting to another time unit (e.g., `1.5h` could instead be specified as `90m`). + +NOTE: You can also specify time values using abbreviations supported by +<<time-units,time units>> parsing. ===== Keys @@ -522,8 +634,6 @@ control the order using the `order` setting. This setting supports the same `order` functionality as <>. -deprecated[6.0.0, Use `_key` instead of `_time` to order buckets by their dates/keys] - ===== Using a script to aggregate by day of the week When you need to aggregate the results by day of the week, use a script that