diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/VersionCollection.java b/buildSrc/src/main/java/org/elasticsearch/gradle/VersionCollection.java
index e2af34dbabdc0..c0965a16479a7 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/VersionCollection.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/VersionCollection.java
@@ -171,30 +171,38 @@ public UnreleasedVersionInfo unreleasedInfo(Version version) {
     }

     public void forPreviousUnreleased(Consumer<UnreleasedVersionInfo> consumer) {
-        getUnreleased().stream()
+        List<UnreleasedVersionInfo> collect = getUnreleased().stream()
             .filter(version -> version.equals(currentVersion) == false)
-            .forEach(version -> consumer.accept(
-                new UnreleasedVersionInfo(
+            .map(version -> new UnreleasedVersionInfo(
                     version,
                     getBranchFor(version),
                     getGradleProjectNameFor(version)
                 )
-            ));
+            )
+            .collect(Collectors.toList());
+
+        collect.forEach(uvi -> consumer.accept(uvi));
     }

     private String getGradleProjectNameFor(Version version) {
         if (version.equals(currentVersion)) {
             throw new IllegalArgumentException("The Gradle project to build " + version + " is the current build.");
         }
+
         Map<Integer, List<Version>> releasedMajorGroupedByMinor = getReleasedMajorGroupedByMinor();

         if (version.getRevision() == 0) {
-            if (releasedMajorGroupedByMinor
-                    .get(releasedMajorGroupedByMinor.keySet().stream().max(Integer::compareTo).orElse(0))
-                    .contains(version)) {
-                return "minor";
+            List<Version> unreleasedStagedOrMinor = getUnreleased().stream()
+                .filter(v -> v.getRevision() == 0)
+                .collect(Collectors.toList());
+            if (unreleasedStagedOrMinor.size() > 2) {
+                if (unreleasedStagedOrMinor.get(unreleasedStagedOrMinor.size() - 2).equals(version)) {
+                    return "minor";
+                } else {
+                    return "staged";
+                }
             } else {
-                return "staged";
+                return "minor";
             }
         } else {
             if (releasedMajorGroupedByMinor
@@ -210,7 +218,14 @@ private String getGradleProjectNameFor(Version version) {
     private String getBranchFor(Version version) {
         switch (getGradleProjectNameFor(version)) {
             case "minor":
-                return version.getMajor() + ".x";
+                // The .x branch will always point to the latest minor (for that major), so a "minor" project will be on the .x branch
+                // unless there is a more recent (higher) minor.
+                final Version latestInMajor = getLatestVersionByKey(groupByMajor, version.getMajor());
+                if (latestInMajor.getMinor() == version.getMinor() && isFinalMinor(version) == false) {
+                    return version.getMajor() + ".x";
+                } else {
+                    return version.getMajor() + "." + version.getMinor();
+                }
             case "staged":
             case "maintenance":
             case "bugfix":
@@ -220,13 +235,30 @@ private String getBranchFor(Version version) {
         }
     }

+    /**
+     * There is no way to infer that 6.7 is the final minor release in the 6.x series until we add a 7.0.1 or 7.1.0 version.
+     * Based on the available versions (7.0.0, 6.7.0, 6.6.1, 6.6.0) the logical conclusion is that 7.0.0 is "master" and 6.7.0 is "6.x".
+     * This method forces 6.7.0 to be recognised as being on the "6.7" branch.
+     */
+    private boolean isFinalMinor(Version version) {
+        return (version.getMajor() == 6 && version.getMinor() == 7);
+    }
+
     public List<Version> getUnreleased() {
         List<Version> unreleased = new ArrayList<>();
         // The current version is being worked, is always unreleased
         unreleased.add(currentVersion);

         // the tip of the previous major is unreleased for sure, be it a minor or a bugfix
-        unreleased.add(getLatestVersionByKey(this.groupByMajor, currentVersion.getMajor() - 1));
+        final Version latestOfPreviousMajor = getLatestVersionByKey(this.groupByMajor, currentVersion.getMajor() - 1);
+        unreleased.add(latestOfPreviousMajor);
+        if (latestOfPreviousMajor.getRevision() == 0) {
+            // if the previous major is a x.y.0 release, then the tip of the minor before that (y-1) is also unreleased
+            final Version previousMinor = getLatestInMinor(latestOfPreviousMajor.getMajor(), latestOfPreviousMajor.getMinor() - 1);
+            if (previousMinor != null) {
+                unreleased.add(previousMinor);
+            }
+        }

         final Map<Integer, List<Version>> groupByMinor = getReleasedMajorGroupedByMinor();
         int greatestMinor = groupByMinor.keySet().stream().max(Integer::compareTo).orElse(0);
@@ -239,8 +271,10 @@ public List<Version> getUnreleased() {
             unreleased.add(getLatestVersionByKey(groupByMinor, greatestMinor - 1));
             if (groupByMinor.getOrDefault(greatestMinor - 1, emptyList()).size() == 1) {
                 // we found that the previous minor is staged but not yet released
-                // in this case, the minor before that has a bugfix
-                unreleased.add(getLatestVersionByKey(groupByMinor, greatestMinor - 2));
+                // in this case, the minor before that has a bugfix, should there be such a minor
+                if (greatestMinor >= 2) {
+                    unreleased.add(getLatestVersionByKey(groupByMinor, greatestMinor - 2));
+                }
             }
         }

@@ -252,6 +286,13 @@ public List<Version> getUnreleased() {
         );
     }

+    private Version getLatestInMinor(int major, int minor) {
+        return groupByMajor.get(major).stream()
+            .filter(v -> v.getMinor() == minor)
+            .max(Version::compareTo)
+            .orElse(null);
+    }
+
     private Version getLatestVersionByKey(Map<Integer, List<Version>> groupByMajor, int key) {
         return groupByMajor.getOrDefault(key, emptyList()).stream()
             .max(Version::compareTo)
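As a rough illustration of the branch-selection rule introduced above in getBranchFor and isFinalMinor, the sketch below reduces it to a standalone method. The class and parameter names (BranchRuleSketch, isLatestMinorOfMajor) are illustrative only and do not exist in VersionCollection; the hard-coded 6.7 case mirrors the isFinalMinor workaround, and the expected outputs match the branch names asserted in VersionCollectionTests below.

    public class BranchRuleSketch {

        // Mirrors the hard-coded assumption above: 6.7 is the last minor of the 6.x series.
        static boolean isFinalMinor(int major, int minor) {
            return major == 6 && minor == 7;
        }

        // A "minor" project tracks the major.x branch only while it is the newest minor of its
        // major and is not the final minor of that series; otherwise it lives on major.minor.
        static String branchFor(int major, int minor, boolean isLatestMinorOfMajor) {
            if (isLatestMinorOfMajor && isFinalMinor(major, minor) == false) {
                return major + ".x";
            }
            return major + "." + minor;
        }

        public static void main(String[] args) {
            System.out.println(branchFor(7, 3, true));  // 7.x, matches the 8.0.0 case in the tests
            System.out.println(branchFor(6, 7, true));  // 6.7, the forced final minor, not 6.x
            System.out.println(branchFor(6, 6, false)); // 6.6, an older minor keeps its own branch
        }
    }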
diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/VersionCollectionTests.java b/buildSrc/src/test/java/org/elasticsearch/gradle/VersionCollectionTests.java
index d1b4e893ec6ad..58ab66e99459b 100644
--- a/buildSrc/src/test/java/org/elasticsearch/gradle/VersionCollectionTests.java
+++ b/buildSrc/src/test/java/org/elasticsearch/gradle/VersionCollectionTests.java
@@ -81,6 +81,12 @@ public class VersionCollectionTests extends GradleUnitTestCase {
             "6_0_0", "6_0_1", "6_1_0", "6_1_1", "6_1_2", "6_1_3", "6_1_4", "6_2_0", "6_2_1", "6_2_2", "6_2_3", "6_2_4",
             "6_3_0", "6_3_1", "6_3_2", "6_4_0", "6_4_1", "6_4_2"
         ));
+        sampleVersions.put("7.0.0", asList(
+            "7_0_0", "6_7_0", "6_6_2", "6_6_1", "6_6_0"
+        ));
+        sampleVersions.put("7.1.0", asList(
+            "7_1_0", "7_0_0", "6_7_0", "6_6_1", "6_6_0"
+        ));
     }

     @Test(expected = IllegalArgumentException.class)
@@ -145,6 +151,11 @@ public void testWireCompatible() {
             singletonList("7.3.0"),
             getVersionCollection("8.0.0").getWireCompatible()
         );
+        assertVersionsEquals(
+            asList("6.7.0", "7.0.0"),
+            getVersionCollection("7.1.0").getWireCompatible()
+        );
+
     }

     public void testWireCompatibleUnreleased() {
@@ -171,6 +182,10 @@ public void testWireCompatibleUnreleased() {
             singletonList("7.3.0"),
getVersionCollection("8.0.0").getUnreleasedWireCompatible() ); + assertVersionsEquals( + asList("6.7.0", "7.0.0"), + getVersionCollection("7.1.0").getWireCompatible() + ); } public void testIndexCompatible() { @@ -270,6 +285,14 @@ public void testGetUnreleased() { asList("7.1.1", "7.2.0", "7.3.0", "8.0.0"), getVersionCollection("8.0.0").getUnreleased() ); + assertVersionsEquals( + asList("6.6.1", "6.7.0", "7.0.0", "7.1.0"), + getVersionCollection("7.1.0").getUnreleased() + ); + assertVersionsEquals( + asList("6.6.2", "6.7.0", "7.0.0"), + getVersionCollection("7.0.0").getUnreleased() + ); } public void testGetBranch() { @@ -293,6 +316,14 @@ public void testGetBranch() { asList("7.1", "7.2", "7.x"), getVersionCollection("8.0.0") ); + assertUnreleasedBranchNames( + asList("6.6", "6.7", "7.0"), + getVersionCollection("7.1.0") + ); + assertUnreleasedBranchNames( + asList("6.6", "6.7"), + getVersionCollection("7.0.0") + ); } public void testGetGradleProjectName() { @@ -309,13 +340,17 @@ public void testGetGradleProjectName() { getVersionCollection("6.4.2") ); assertUnreleasedGradleProjectNames( - asList("maintenance", "bugfix", "staged"), + asList("maintenance", "bugfix", "minor"), getVersionCollection("6.6.0") ); assertUnreleasedGradleProjectNames( asList("bugfix", "staged", "minor"), getVersionCollection("8.0.0") ); + assertUnreleasedGradleProjectNames( + asList("maintenance", "staged", "minor"), + getVersionCollection("7.1.0") + ); } public void testCompareToAuthoritative() { diff --git a/docs/reference/ml/apis/put-datafeed.asciidoc b/docs/reference/ml/apis/put-datafeed.asciidoc index 18c611e97cac1..05e02ce3615df 100644 --- a/docs/reference/ml/apis/put-datafeed.asciidoc +++ b/docs/reference/ml/apis/put-datafeed.asciidoc @@ -19,6 +19,11 @@ Instantiates a {dfeed}. You must create a job before you create a {dfeed}. You can associate only one {dfeed} to each job. +IMPORTANT: You must use {kib} or this API to create a {dfeed}. Do not put a {dfeed} + directly to the `.ml-config` index using the Elasticsearch index API. + If {es} {security-features} are enabled, do not give users `write` + privileges on the `.ml-config` index. + ==== Path Parameters diff --git a/docs/reference/ml/apis/put-job.asciidoc b/docs/reference/ml/apis/put-job.asciidoc index 4abeebee3e47a..e3d80c276dc55 100644 --- a/docs/reference/ml/apis/put-job.asciidoc +++ b/docs/reference/ml/apis/put-job.asciidoc @@ -12,7 +12,13 @@ Instantiates a job. `PUT _ml/anomaly_detectors/` -//===== Description +===== Description + +IMPORTANT: You must use {kib} or this API to create a {ml} job. Do not put a job + directly to the `.ml-config` index using the Elasticsearch index API. + If {es} {security-features} are enabled, do not give users `write` + privileges on the `.ml-config` index. 
diff --git a/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseIT.java b/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseIT.java
index 13dadd051c273..a2880c0d330ad 100644
--- a/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseIT.java
+++ b/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseIT.java
@@ -260,6 +260,7 @@ public void testBackgroundRetentionLeaseSync() throws Exception {
         }
     }

+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/38588")
     public void testRetentionLeasesSyncOnRecovery() throws Exception {
         final int numberOfReplicas = 2 - scaledRandomIntBetween(0, 2);
         internalCluster().ensureAtLeastNumDataNodes(1 + numberOfReplicas);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java
index 84c480b8d510b..20c4601c19953 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java
@@ -43,7 +43,7 @@ public class VersionUtils {
      * rules here match up with the rules in gradle then this should
      * produce sensible results.
      * @return a tuple containing versions with backwards compatibility
-     * guarantees in v1 and versions without the guranteees in v2
+     * guarantees in v1 and versions without the guarantees in v2
      */
     static Tuple<List<Version>, List<Version>> resolveReleasedVersions(Version current, Class<?> versionClass) {
         // group versions into major version
@@ -52,7 +52,7 @@ static Tuple<List<Version>, List<Version>> resolveReleasedVersions(Version curre
         // this breaks b/c 5.x is still in version list but master doesn't care about it!
         //assert majorVersions.size() == 2;
         // TODO: remove oldVersions, we should only ever have 2 majors in Version
-        List<Version> oldVersions = majorVersions.getOrDefault((int)current.major - 2, Collections.emptyList());
+        List<List<Version>> oldVersions = splitByMinor(majorVersions.getOrDefault((int)current.major - 2, Collections.emptyList()));
         List<List<Version>> previousMajor = splitByMinor(majorVersions.get((int)current.major - 1));
         List<List<Version>> currentMajor = splitByMinor(majorVersions.get((int)current.major));

@@ -67,7 +67,11 @@ static Tuple<List<Version>, List<Version>> resolveReleasedVersions(Version curre
             // on a stable or release branch, ie N.x
             stableVersions = currentMajor;
             // remove the next maintenance bugfix
-            moveLastToUnreleased(previousMajor, unreleasedVersions);
+            final Version prevMajorLastMinor = moveLastToUnreleased(previousMajor, unreleasedVersions);
+            if (prevMajorLastMinor.revision == 0 && previousMajor.isEmpty() == false) {
+                // The latest minor in the previous major is a ".0" release, so there must be an unreleased bugfix for the minor before that
+                moveLastToUnreleased(previousMajor, unreleasedVersions);
+            }
         }

         // remove next minor
@@ -78,12 +82,21 @@ static Tuple<List<Version>, List<Version>> resolveReleasedVersions(Version curre
                 moveLastToUnreleased(stableVersions, unreleasedVersions);
             }
             // remove the next bugfix
-            moveLastToUnreleased(stableVersions, unreleasedVersions);
+            if (stableVersions.isEmpty() == false) {
+                moveLastToUnreleased(stableVersions, unreleasedVersions);
+            }
         }

-        List<Version> releasedVersions = Stream.concat(oldVersions.stream(),
-            Stream.concat(previousMajor.stream(), currentMajor.stream()).flatMap(List::stream))
-            .collect(Collectors.toList());
+        // If none of the previous major's versions have been released, then the last minor and bugfix of the old major were not released either.
+        if (previousMajor.isEmpty()) {
+            assert currentMajor.isEmpty() : currentMajor;
+            // minor of the old version is being staged
+            moveLastToUnreleased(oldVersions, unreleasedVersions);
+            // bugfix of the old version is also being staged
+            moveLastToUnreleased(oldVersions, unreleasedVersions);
+        }
+        List<Version> releasedVersions = Stream.of(oldVersions, previousMajor, currentMajor)
+            .flatMap(List::stream).flatMap(List::stream).collect(Collectors.toList());
         Collections.sort(unreleasedVersions); // we add unreleased out of order, so need to sort here
         return new Tuple<>(Collections.unmodifiableList(releasedVersions), Collections.unmodifiableList(unreleasedVersions));
     }
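A small worked example of the bookkeeping above, using the sample version set that VersionCollectionTests adds for a current version of 7.1.0. The helper name latestIn and the string representation are illustrative; the point is only that a x.y.0 tip of the previous major implies an unreleased bugfix in the minor before it (6.6.1 here, matching testGetUnreleased).

    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.List;
    import java.util.Optional;

    public class UnreleasedSketch {

        // Latest known version within one major.minor series, e.g. latestIn(known, "6.6") -> "6.6.1".
        static Optional<String> latestIn(List<String> known, String majorMinor) {
            return known.stream()
                .filter(v -> v.startsWith(majorMinor + "."))
                .max(Comparator.comparingInt(v -> Integer.parseInt(v.substring(v.lastIndexOf('.') + 1))));
        }

        public static void main(String[] args) {
            // sample set from VersionCollectionTests when the current version is 7.1.0
            List<String> known = Arrays.asList("7.1.0", "7.0.0", "6.7.0", "6.6.1", "6.6.0");
            String tipOfPreviousMajor = "6.7.0";
            if (tipOfPreviousMajor.endsWith(".0")) {
                // a x.y.0 tip means the minor before it (6.6) still needs an unreleased bugfix
                System.out.println(latestIn(known, "6.6").orElse("none")); // prints 6.6.1
            }
        }
    }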
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java
index 80bded6a5d1d3..5fdd9dc315f6b 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java
@@ -970,6 +970,7 @@ public void testMustCloseIndexAndPauseToRestartWithPutFollowing() throws Excepti
             () -> followerClient().execute(PutFollowAction.INSTANCE, followRequest2).actionGet());
     }

+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/38617")
     public void testIndexFallBehind() throws Exception {
         final int numberOfPrimaryShards = randomIntBetween(1, 3);
         final String leaderIndexSettings = getIndexSettings(numberOfPrimaryShards, between(0, 1),
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEventTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEventTests.java
index 05209628542fe..6508ee5cb2054 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEventTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEventTests.java
@@ -18,6 +18,7 @@

 import java.io.IOException;
 import java.time.Clock;
+import java.time.Instant;
 import java.time.ZoneOffset;
 import java.time.ZonedDateTime;
 import java.util.EnumSet;
@@ -28,7 +29,7 @@ public class ScheduledEventTests extends AbstractSerializingTestCase<ScheduledEvent> {

     public static ScheduledEvent createScheduledEvent(String calendarId) {
-        ZonedDateTime start = Clock.systemUTC().instant().atZone(ZoneOffset.UTC);
+        ZonedDateTime start = nowWithMillisResolution();
         return new ScheduledEvent(randomAlphaOfLength(10), start, start.plusSeconds(randomIntBetween(1, 10000)), calendarId, null);
     }

@@ -119,4 +120,8 @@ public void testLenientParser() throws IOException {
             ScheduledEvent.LENIENT_PARSER.apply(parser, null);
         }
     }
+
+    private static ZonedDateTime nowWithMillisResolution() {
+        return Instant.ofEpochMilli(Clock.systemUTC().millis()).atZone(ZoneOffset.UTC);
+    }
 }
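The nowWithMillisResolution() change above exists because the system clock can produce timestamps with finer than millisecond precision, which no longer compare equal once the event is serialised and read back with millisecond resolution. A minimal, self-contained illustration (the printed values are examples only):

    import java.time.Clock;
    import java.time.Instant;
    import java.time.ZoneOffset;
    import java.time.ZonedDateTime;

    public class MillisResolutionDemo {
        public static void main(String[] args) {
            // On newer JDKs, Clock.systemUTC() can carry sub-millisecond precision, which is lost
            // once a timestamp is serialised with millisecond resolution and parsed back.
            ZonedDateTime nanoPrecision = Clock.systemUTC().instant().atZone(ZoneOffset.UTC);
            // Truncating to the clock's millis up front keeps the value stable across a round trip,
            // which is what nowWithMillisResolution() above does.
            ZonedDateTime milliPrecision = Instant.ofEpochMilli(Clock.systemUTC().millis()).atZone(ZoneOffset.UTC);
            System.out.println(nanoPrecision);  // e.g. 2019-02-08T10:15:30.123456789Z
            System.out.println(milliPrecision); // e.g. 2019-02-08T10:15:30.123Z
        }
    }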
@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/38408") public void testGetAndInvalidateApiKeysWithExpiredAndInvalidatedApiKey() throws Exception { List responses = createApiKeys(1, null); Instant created = Instant.now(); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java index 5c9dafeaca001..947bf2e210081 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java @@ -17,12 +17,15 @@ import org.elasticsearch.xpack.core.watcher.actions.ActionStatus; import org.elasticsearch.xpack.core.watcher.client.WatchSourceBuilder; import org.elasticsearch.xpack.core.watcher.input.Input; +import org.elasticsearch.xpack.core.watcher.support.WatcherDateTimeUtils; import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource; import org.elasticsearch.xpack.core.watcher.watch.WatchStatus; import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; +import org.hamcrest.Matcher; +import java.time.ZonedDateTime; import java.util.Locale; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -172,10 +175,10 @@ public void testThatHistoryContainsStatus() throws Exception { assertThat(active, is(status.state().isActive())); String timestamp = source.getValue("status.state.timestamp"); - assertThat(timestamp, is(status.state().getTimestamp().toString())); + assertThat(timestamp, isSameDate(status.state().getTimestamp())); String lastChecked = source.getValue("status.last_checked"); - assertThat(lastChecked, is(status.lastChecked().toString())); + assertThat(lastChecked, isSameDate(status.lastChecked())); Integer version = source.getValue("status.version"); int expectedVersion = (int) (status.version() - 1); @@ -196,4 +199,14 @@ public void testThatHistoryContainsStatus() throws Exception { assertThat(mappingSource.getValue("doc.properties.status.properties.status"), is(nullValue())); assertThat(mappingSource.getValue("doc.properties.status.properties.status.properties.active"), is(nullValue())); } + + + private Matcher isSameDate(ZonedDateTime zonedDateTime) { + /* + When comparing timestamps returned from _search/.watcher-history* the same format of date has to be used + during serialisation to json on index time. + The toString of ZonedDateTime is omitting the millisecond part when is 0. This was not the case in joda. 
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchStatusIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchStatusIntegrationTests.java
index 9f738d8daa6b2..20e27bd8b8d15 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchStatusIntegrationTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchStatusIntegrationTests.java
@@ -24,6 +24,7 @@

 public class WatchStatusIntegrationTests extends AbstractWatcherIntegrationTestCase {

+    @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/38619")
     public void testThatStatusGetsUpdated() {
         WatcherClient watcherClient = watcherClient();
         watcherClient.preparePutWatch("_name")