diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java
index c20f0128f2379..826ec1211a66a 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java
@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.gradle.testclusters;
 
-import org.elasticsearch.gradle.ElasticsearchDistribution;
 import org.elasticsearch.gradle.FileSupplier;
 import org.elasticsearch.gradle.PropertyNormalization;
 import org.elasticsearch.gradle.ReaperService;
@@ -59,24 +58,23 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
     private final String clusterName;
     private final NamedDomainObjectContainer<ElasticsearchNode> nodes;
     private final File workingDirBase;
-    private final Function<Integer, ElasticsearchDistribution> distributionFactory;
     private final LinkedHashMap<String, Predicate<TestClusterConfiguration>> waitConditions = new LinkedHashMap<>();
     private final Project project;
     private final ReaperService reaper;
+    private int nodeIndex = 0;
 
-    public ElasticsearchCluster(String path, String clusterName, Project project, ReaperService reaper,
-                                Function<Integer, ElasticsearchDistribution> distributionFactory, File workingDirBase) {
+    public ElasticsearchCluster(String path, String clusterName, Project project,
+                                ReaperService reaper, File workingDirBase) {
         this.path = path;
         this.clusterName = clusterName;
         this.project = project;
         this.reaper = reaper;
-        this.distributionFactory = distributionFactory;
         this.workingDirBase = workingDirBase;
         this.nodes = project.container(ElasticsearchNode.class);
         this.nodes.add(
             new ElasticsearchNode(
                 path, clusterName + "-0",
-                project, reaper, workingDirBase, distributionFactory.apply(0)
+                project, reaper, workingDirBase
             )
         );
         // configure the cluster name eagerly so nodes know about it
@@ -100,7 +98,7 @@ public void setNumberOfNodes(int numberOfNodes) {
 
         for (int i = nodes.size() ; i < numberOfNodes; i++) {
             this.nodes.add(new ElasticsearchNode(
-                path, clusterName + "-" + i, project, reaper, workingDirBase, distributionFactory.apply(i)
+                path, clusterName + "-" + i, project, reaper, workingDirBase
             ));
         }
     }
@@ -126,6 +124,11 @@ public void setVersion(String version) {
         nodes.all(each -> each.setVersion(version));
     }
 
+    @Override
+    public void setVersions(List<String> version) {
+        nodes.all(each -> each.setVersions(version));
+    }
+
     @Override
     public void setTestDistribution(TestDistribution distribution) {
         nodes.all(each -> each.setTestDistribution(distribution));
@@ -249,8 +252,8 @@ public void start() {
         if (nodes.stream().map(ElasticsearchNode::getName).anyMatch(name -> name == null)) {
             nodeNames = null;
         } else {
-            nodeNames = nodes.stream().map(ElasticsearchNode::getName).collect(Collectors.joining(","));
-        };
+            nodeNames = nodes.stream().map(ElasticsearchNode::getName).map(this::safeName).collect(Collectors.joining(","));
+        }
         for (ElasticsearchNode node : nodes) {
             if (nodeNames != null) {
                 // Can only configure master nodes if we have node names defined
@@ -269,6 +272,19 @@ public void restart() {
         nodes.forEach(ElasticsearchNode::restart);
     }
 
+    @Override
+    public void goToNextVersion() {
+        nodes.all(ElasticsearchNode::goToNextVersion);
+    }
+
+    public void nextNodeToNextVersion() {
+        if (nodeIndex + 1 > nodes.size()) {
+            throw new TestClustersException("Ran out of nodes to take to the next version");
+        }
+        nodes.getByName(clusterName + "-" + nodeIndex).goToNextVersion();
+        nodeIndex += 1;
+    }
+
     @Override
     public void extraConfigFile(String destination, File from) {
         nodes.all(node -> node.extraConfigFile(destination, from));
@@ -363,7 +379,6 @@ private void addWaitForClusterHealth() {
                     nodes.size()
                 );
                 if (httpSslEnabled) {
-
                     getFirstNode().configureHttpWait(wait);
                 }
                 List<Map<String, String>> credentials = getFirstNode().getCredentials();
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
index 900a0f99ed9f3..2cc7fbe24dc69 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.gradle.testclusters;
 
+import org.elasticsearch.gradle.DistributionDownloadPlugin;
 import org.elasticsearch.gradle.ElasticsearchDistribution;
 import org.elasticsearch.gradle.FileSupplier;
 import org.elasticsearch.gradle.LazyPropertyList;
@@ -31,8 +32,8 @@
 import org.elasticsearch.gradle.http.WaitForHttpResource;
 import org.gradle.api.Action;
 import org.gradle.api.Named;
+import org.gradle.api.NamedDomainObjectContainer;
 import org.gradle.api.Project;
-import org.gradle.api.file.FileCollection;
 import org.gradle.api.logging.Logger;
 import org.gradle.api.logging.Logging;
 import org.gradle.api.tasks.Classpath;
@@ -71,6 +72,7 @@
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
+import java.util.TreeSet;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
@@ -135,23 +137,23 @@ public class ElasticsearchNode implements TestClusterConfiguration {
     private final Path esStdoutFile;
     private final Path esStderrFile;
     private final Path tmpDir;
+    private final Path distroDir;
 
-    private String version;
+    private int currentDistro = 0;
     private TestDistribution testDistribution;
-    private ElasticsearchDistribution distribution;
+    private List<ElasticsearchDistribution> distributions = new ArrayList<>();
     private File javaHome;
     private volatile Process esProcess;
     private Function<String, String> nameCustomization = Function.identity();
     private boolean isWorkingDirConfigured = false;
 
-    ElasticsearchNode(String path, String name, Project project, ReaperService reaper, File workingDirBase,
-                      ElasticsearchDistribution distribution) {
+    ElasticsearchNode(String path, String name, Project project, ReaperService reaper, File workingDirBase) {
         this.path = path;
         this.name = name;
         this.project = project;
         this.reaper = reaper;
-        this.workingDir = workingDirBase.toPath().resolve(safeName(name)).toAbsolutePath();
-        this.distribution = distribution;
+        workingDir = workingDirBase.toPath().resolve(safeName(name)).toAbsolutePath();
+        distroDir = workingDir.resolve("distro");
         confPathRepo = workingDir.resolve("repo");
         configFile = workingDir.resolve("config/elasticsearch.yml");
         confPathData = workingDir.resolve("data");
@@ -173,15 +175,31 @@ public String getName() {
 
     @Internal
     public Version getVersion() {
-        return distribution.getVersion();
+        return distributions.get(currentDistro).getVersion();
     }
 
     @Override
     public void setVersion(String version) {
         requireNonNull(version, "null version passed when configuring test cluster `" + this + "`");
+        String distroName = "testclusters" + path.replace(":", "-") + "-" + this.name + "-" + version + "-";
+        NamedDomainObjectContainer<ElasticsearchDistribution> container = DistributionDownloadPlugin.getContainer(project);
+        if (container.findByName(distroName) == null){
+            container.create(distroName);
+        }
+        ElasticsearchDistribution distro = container.getByName(distroName);
+        distro.setVersion(version);
+        setDistributionType(distro, testDistribution);
+        distributions.add(distro);
+    }
+
+    @Override
+    public void setVersions(List<String> versions) {
+        requireNonNull(versions, "null version list passed when configuring test cluster `" + this + "`");
         checkFrozen();
-        this.version = version;
-        this.distribution.setVersion(version);
+        distributions.clear();
+        for (String version : versions) {
+            setVersion(version);
+        }
     }
 
     @Internal
@@ -191,8 +209,8 @@ public TestDistribution getTestDistribution() {
 
     // package private just so test clusters plugin can access to wire up task dependencies
     @Internal
-    ElasticsearchDistribution getDistribution() {
-        return distribution;
+    List<ElasticsearchDistribution> getDistributions() {
+        return distributions;
     }
 
     @Override
@@ -200,14 +218,20 @@ public void setTestDistribution(TestDistribution testDistribution) {
         requireNonNull(testDistribution, "null distribution passed when configuring test cluster `" + this + "`");
         checkFrozen();
         this.testDistribution = testDistribution;
+        for (ElasticsearchDistribution distribution : distributions) {
+            setDistributionType(distribution, testDistribution);
+        }
+    }
+
+    private void setDistributionType(ElasticsearchDistribution distribution, TestDistribution testDistribution) {
         if (testDistribution == TestDistribution.INTEG_TEST) {
-            this.distribution.setType(ElasticsearchDistribution.Type.INTEG_TEST_ZIP);
+            distribution.setType(ElasticsearchDistribution.Type.INTEG_TEST_ZIP);
         } else {
-            this.distribution.setType(ElasticsearchDistribution.Type.ARCHIVE);
+            distribution.setType(ElasticsearchDistribution.Type.ARCHIVE);
             if (testDistribution == TestDistribution.DEFAULT) {
-                this.distribution.setFlavor(ElasticsearchDistribution.Flavor.DEFAULT);
+                distribution.setFlavor(ElasticsearchDistribution.Flavor.DEFAULT);
             } else {
-                this.distribution.setFlavor(ElasticsearchDistribution.Flavor.OSS);
+                distribution.setFlavor(ElasticsearchDistribution.Flavor.OSS);
             }
         }
     }
@@ -317,8 +341,7 @@ public Path getConfigDir() {
 
     @Override
     public void freeze() {
-        requireNonNull(distribution, "null distribution passed when configuring test cluster `" + this + "`");
-        requireNonNull(getVersion(), "null version passed when configuring test cluster `" + this + "`");
+        requireNonNull(distributions, "null distribution passed when configuring test cluster `" + this + "`");
         requireNonNull(javaHome, "null javaHome passed when configuring test cluster `" + this + "`");
         LOGGER.info("Locking configuration of `{}`", this);
         configurationFrozen.set(true);
@@ -361,10 +384,13 @@ public synchronized void start() {
         try {
             if (isWorkingDirConfigured == false) {
                 logToProcessStdout("Configuring working directory: " + workingDir);
-                // Only configure working dir once so we don't lose data on restarts
+                // make sure we always start fresh
+                if (Files.exists(workingDir)) {
+                    project.delete(workingDir);
+                }
                 isWorkingDirConfigured = true;
-                createWorkingDir(getExtractedDistributionDir());
             }
+            createWorkingDir(getExtractedDistributionDir());
         } catch (IOException e) {
             throw new UncheckedIOException("Failed to create working directory for " + this, e);
         }
@@ -446,6 +472,18 @@ public void restart() {
         start();
     }
 
+    @Override
+    public void goToNextVersion() {
+        if (currentDistro + 1 >= distributions.size()) {
+            throw new TestClustersException("Ran out of versions to go to for " + this);
+        }
+        LOGGER.info("Switch version from {} to {} for {}",
+            getVersion(), distributions.get(currentDistro + 1).getVersion(), this
+        );
+        currentDistro += 1;
+        restart();
+    }
+
     private boolean isSettingMissingOrTrue(String name) {
         return Boolean.valueOf(settings.getOrDefault(name, "false").toString());
     }
 
@@ -474,8 +512,9 @@ private void installModules() {
         if (testDistribution == TestDistribution.INTEG_TEST) {
             logToProcessStdout("Installing " + modules.size() + "modules");
             for (File module : modules) {
-                Path destination = workingDir.resolve("modules").resolve(module.getName().replace(".zip", "")
-                    .replace("-" + version, ""));
+                Path destination = distroDir.resolve("modules").resolve(module.getName().replace(".zip", "")
+                    .replace("-" + getVersion(), "")
+                    .replace("-SNAPSHOT", ""));
 
                 // only install modules that are not already bundled with the integ-test distribution
                 if (Files.exists(destination) == false) {
@@ -492,7 +531,7 @@ private void installModules() {
                 }
             }
         } else {
-            LOGGER.info("Not installing " + modules.size() + "(s) since the " + distribution + " distribution already " +
+            LOGGER.info("Not installing " + modules.size() + "(s) since the " + distributions + " distribution already " +
                 "has them");
         }
     }
@@ -533,8 +572,8 @@ public void user(Map<String, String> userSpec) {
 
     private void runElaticsearchBinScriptWithInput(String input, String tool, String... args) {
         if (
-            Files.exists(workingDir.resolve("bin").resolve(tool)) == false &&
-            Files.exists(workingDir.resolve("bin").resolve(tool + ".bat")) == false
+            Files.exists(distroDir.resolve("bin").resolve(tool)) == false &&
+            Files.exists(distroDir.resolve("bin").resolve(tool + ".bat")) == false
         ) {
             throw new TestClustersException("Can't run bin script: `" + tool + "` does not exist. " +
                 "Is this the distribution you expect it to be ?");
@@ -542,7 +581,7 @@ private void runElaticsearchBinScriptWithInput(String input, String tool, String
         try (InputStream byteArrayInputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8))) {
             LoggedExec.exec(project, spec -> {
                 spec.setEnvironment(getESEnvironment());
-                spec.workingDir(workingDir);
+                spec.workingDir(distroDir);
                 spec.executable(
                     OS.conditionalString()
                         .onUnix(() -> "./bin/" + tool)
@@ -620,8 +659,8 @@ private void startElasticsearchProcess() {
 
         final ProcessBuilder processBuilder = new ProcessBuilder();
         List<String> command = OS.<List<String>>conditional()
-            .onUnix(() -> Arrays.asList("./bin/elasticsearch"))
-            .onWindows(() -> Arrays.asList("cmd", "/c", "bin\\elasticsearch.bat"))
+            .onUnix(() -> Arrays.asList(distroDir.getFileName().resolve("./bin/elasticsearch").toString()))
+            .onWindows(() -> Arrays.asList("cmd", "/c", distroDir.getFileName().resolve("bin\\elasticsearch.bat").toString()))
             .supply();
         processBuilder.command(command);
         processBuilder.directory(workingDir.toFile());
@@ -821,7 +860,7 @@ private void waitForProcessToExit(ProcessHandle processHandle) {
     }
 
     private void createWorkingDir(Path distroExtractDir) throws IOException {
-        syncWithLinks(distroExtractDir, workingDir);
+        syncWithLinks(distroExtractDir, distroDir);
         Files.createDirectories(configFile.getParent());
         Files.createDirectories(confPathRepo);
         Files.createDirectories(confPathData);
@@ -844,7 +883,14 @@ private void syncWithLinks(Path sourceRoot, Path destinationRoot) {
 
         try (Stream<Path> stream = Files.walk(sourceRoot)) {
             stream.forEach(source -> {
-                Path destination = destinationRoot.resolve(sourceRoot.relativize(source));
+                Path relativeDestination = sourceRoot.relativize(source);
+                if (relativeDestination.getNameCount() <= 1) {
+                    return;
+                }
+                // Throw away the first name as the archives have everything in a single top level folder we are not interested in
+                relativeDestination = relativeDestination.subpath(1, relativeDestination.getNameCount());
+
+                Path destination = destinationRoot.resolve(relativeDestination);
                 if (Files.isDirectory(source)) {
                     try {
                         Files.createDirectories(destination);
@@ -920,9 +966,6 @@ private void createConfiguration() {
             .forEach(defaultConfig::remove);
 
         try {
-            // We create hard links for the distribution, so we need to remove the config file before writing it
-            // to prevent the changes to reflect across all copies.
-            Files.delete(configFile);
             Files.write(
                 configFile,
                 Stream.concat(
@@ -931,8 +974,21 @@ private void createConfiguration() {
                 )
                     .map(entry -> entry.getKey() + ": " + entry.getValue())
                     .collect(Collectors.joining("\n"))
-                    .getBytes(StandardCharsets.UTF_8)
+                    .getBytes(StandardCharsets.UTF_8),
+                StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE
             );
+
+            final List<Path> configFiles;
+            try (Stream<Path> stream = Files.list(distroDir.resolve("config"))) {
+                configFiles = stream.collect(Collectors.toList());
+            }
+            logToProcessStdout("Copying additional config files from distro " + configFiles);
+            for (Path file : configFiles) {
+                Path dest = configFile.getParent().resolve(file.getFileName());
+                if (Files.exists(dest) == false) {
+                    Files.copy(file, dest);
+                }
+            }
         } catch (IOException e) {
             throw new UncheckedIOException("Could not write config file: " + configFile, e);
         }
@@ -972,7 +1028,7 @@ private List<String> readPortsFile(Path file) throws IOException {
     }
 
     private Path getExtractedDistributionDir() {
-        return Paths.get(distribution.getExtracted().toString()).resolve("elasticsearch-" + version);
+        return Paths.get(distributions.get(currentDistro).getExtracted().toString());
     }
 
     private List<File> getInstalledFileSet(Action<? super PatternFilterable> filter) {
@@ -1007,19 +1063,26 @@ private List<File> getInstalledFiles() {
     }
 
     @Classpath
-    private List<File> getDistributionClasspath() {
-        ArrayList<File> files = new ArrayList<>(project.fileTree(getExtractedDistributionDir())
-            .matching(filter -> filter.include("**/*.jar"))
-            .getFiles());
-        files.sort(Comparator.comparing(File::getName));
-
-        return files;
+    private Set<File> getDistributionClasspath() {
+        return getDistributionFiles(filter -> filter.include("**/*.jar"));
     }
 
     @InputFiles
     @PathSensitive(PathSensitivity.RELATIVE)
-    private FileCollection getDistributionFiles() {
-        return project.fileTree(getExtractedDistributionDir()).minus(project.files(getDistributionClasspath()));
+    private Set<File> getDistributionFiles() {
+        return getDistributionFiles(filter -> filter.exclude("**/*.jar"));
+    }
+
+    private Set<File> getDistributionFiles(Action<PatternFilterable> patternFilter) {
+        Set<File> files = new TreeSet<>();
+        for (ElasticsearchDistribution distribution : distributions) {
+            files.addAll(
+                project.fileTree(Paths.get(distribution.getExtracted().toString()))
+                    .matching(patternFilter)
+                    .getFiles()
+            );
+        }
+        return files;
     }
 
     @Nested
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java
index 8b8c980f523f4..1b2eb44e66b6b 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java
@@ -38,6 +38,8 @@ public interface TestClusterConfiguration {
 
     void setVersion(String version);
 
+    void setVersions(List<String> version);
+
     void setTestDistribution(TestDistribution distribution);
 
     void plugin(URI plugin);
@@ -86,6 +88,8 @@ public interface TestClusterConfiguration {
 
     void restart();
 
+    void goToNextVersion();
+
     void extraConfigFile(String destination, File from);
 
     void extraConfigFile(String destination, File from, PropertyNormalization normalization);
@@ -165,7 +169,7 @@ default void waitForConditions(
     default String safeName(String name) {
         return name
             .replaceAll("^[^a-zA-Z0-9]+", "")
-            .replaceAll("[^a-zA-Z0-9]+", "-");
+            .replaceAll("[^a-zA-Z0-9\\.]+", "-");
     }
 
     boolean isProcessAlive();
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java
index 72703399c95a5..1669a62d57b54 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java
@@ -18,9 +18,9 @@ default void useCluster(ElasticsearchCluster cluster) {
             );
         }
 
-        for (ElasticsearchNode node : cluster.getNodes()) {
-            this.dependsOn(node.getDistribution().getExtracted());
-        }
+        cluster.getNodes().stream().flatMap(node -> node.getDistributions().stream()).forEach( distro ->
+            dependsOn(distro.getExtracted())
+        );
         getClusters().add(cluster);
     }
 }
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java
index b6c8c39e2edf2..5b6d6b4194a72 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java
@@ -19,7 +18,6 @@
 package org.elasticsearch.gradle.testclusters;
 
 import org.elasticsearch.gradle.DistributionDownloadPlugin;
-import org.elasticsearch.gradle.ElasticsearchDistribution;
 import org.elasticsearch.gradle.ReaperPlugin;
 import org.elasticsearch.gradle.ReaperService;
 import org.gradle.api.NamedDomainObjectContainer;
@@ -76,8 +75,6 @@ public void apply(Project project) {
     }
 
     private NamedDomainObjectContainer<ElasticsearchCluster> createTestClustersContainerExtension(Project project) {
-        NamedDomainObjectContainer<ElasticsearchDistribution> distros = DistributionDownloadPlugin.getContainer(project);
-
         // Create an extensions that allows describing clusters
         NamedDomainObjectContainer<ElasticsearchCluster> container = project.container(
             ElasticsearchCluster.class,
@@ -86,7 +83,6 @@ private NamedDomainObjectContainer<ElasticsearchCluster> createTestClustersConta
                 name,
                 project,
                 reaper,
-                i -> distros.create(name + "-" + i),
                 new File(project.getBuildDir(), "testclusters")
             )
         );
diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle
index 15c44f38f7cf3..1dc5ddd1acee4 100644
--- a/qa/full-cluster-restart/build.gradle
+++ b/qa/full-cluster-restart/build.gradle
@@ -19,9 +19,10 @@
 
 import org.elasticsearch.gradle.Version
-import org.elasticsearch.gradle.test.RestIntegTestTask
+import org.elasticsearch.gradle.testclusters.RestTestRunnerTask
 
 apply plugin: 'elasticsearch.standalone-test'
+apply plugin: 'elasticsearch.testclusters'
 
 // This is a top level task which we will add dependencies to below.
 // It is a single task that can be used to backcompat tests against all versions.
@@ -30,65 +31,53 @@ task bwcTest {
   group = 'verification'
 }
 
-for (Version version : bwcVersions.indexCompatible) {
-  String baseName = "v${version}"
+for (Version bwcVersion : bwcVersions.indexCompatible) {
+  String baseName = "v${bwcVersion}"
 
-  Task oldClusterTest = tasks.create(name: "${baseName}#oldClusterTest", type: RestIntegTestTask) {
-    mustRunAfter(precommit)
-  }
-  tasks.getByName("${baseName}#oldClusterTestRunner").configure {
-    systemProperty 'tests.is_old_cluster', 'true'
-    systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT")
-    systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo/" + baseName)
-  }
-
-  Object extension = extensions.findByName("${baseName}#oldClusterTestCluster")
-  configure(extensions.findByName("${baseName}#oldClusterTestCluster")) {
-    bwcVersion = version
-    numBwcNodes = 2
-    numNodes = 2
-    clusterName = 'full-cluster-restart'
-
-    // some tests rely on the translog not being flushed
-    setting 'indices.memory.shard_inactive_time', '20m'
-
-    if (version.onOrAfter('5.3.0')) {
+  testClusters {
+    "${baseName}" {
+      versions = [ bwcVersion.toString(), project.version ]
+      numberOfNodes = 2
+      // some tests rely on the translog not being flushed
+      setting 'indices.memory.shard_inactive_time', '20m'
       setting 'http.content_type.required', 'true'
+      setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
+      javaHome = project.file(project.ext.runtimeJavaHome)
     }
   }
 
+  tasks.register("${baseName}#oldClusterTest", RestTestRunnerTask) {
+    useCluster testClusters."${baseName}"
+    mustRunAfter(precommit)
+    doFirst {
+      project.delete("${buildDir}/cluster/shared/repo/${baseName}")
+    }
 
-  Task upgradedClusterTest = tasks.create(name: "${baseName}#upgradedClusterTest", type: RestIntegTestTask) {
-    dependsOn(oldClusterTest, "${baseName}#oldClusterTestCluster#node0.stop")
-  }
-
-  configure(extensions.findByName("${baseName}#upgradedClusterTestCluster")) {
-    dependsOn oldClusterTest,
-        "${baseName}#oldClusterTestCluster#node0.stop",
-        "${baseName}#oldClusterTestCluster#node1.stop"
-    clusterName = 'full-cluster-restart'
-
-    // some tests rely on the translog not being flushed
-    setting 'indices.memory.shard_inactive_time', '20m'
-
-    numNodes = 2
-    dataDir = { nodeNum -> oldClusterTest.nodes[nodeNum].dataDir }
-    cleanShared = false // We want to keep snapshots made by the old cluster!
+    systemProperty 'tests.is_old_cluster', 'true'
   }
 
-  tasks.getByName("${baseName}#upgradedClusterTestRunner").configure {
+  tasks.register("${baseName}#upgradedClusterTest", RestTestRunnerTask) {
+    useCluster testClusters."${baseName}"
+    dependsOn "${baseName}#oldClusterTest"
+    doFirst {
+      testClusters."${baseName}".goToNextVersion()
+    }
     systemProperty 'tests.is_old_cluster', 'false'
-    systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT")
-    systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo")
-  }
+  }
 
-  Task versionBwcTest = tasks.create(name: "${baseName}#bwcTest") {
-    dependsOn = [upgradedClusterTest]
+  tasks.matching { it.name.startsWith(baseName) && it.name.endsWith("ClusterTest") }.configureEach {
+    it.systemProperty 'tests.old_cluster_version', bwcVersion.toString().minus("-SNAPSHOT")
+    it.systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
+    it.nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }")
+    it.nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }")
   }
 
   if (project.bwc_tests_enabled) {
-    bwcTest.dependsOn(versionBwcTest)
+    bwcTest.dependsOn(
+      tasks.register("${baseName}#bwcTest") {
+        dependsOn tasks.named("${baseName}#upgradedClusterTest")
+      }
+    )
   }
 }
@@ -116,4 +105,4 @@ task testJar(type: Jar) {
 
 artifacts {
   testArtifacts testJar
-}
\ No newline at end of file
+}
diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
index 13be323aee888..bc0c3368ee334 100644
--- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
+++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
@@ -235,8 +235,6 @@ public void testClusterState() throws Exception {
         Map<String, Object> clusterState = entityAsMap(client().performRequest(new Request("GET", "/_cluster/state")));
 
         // Check some global properties:
-        String clusterName = (String) clusterState.get("cluster_name");
-        assertEquals("full-cluster-restart", clusterName);
        String numberOfShards = (String) XContentMapValues.extractValue(
             "metadata.templates.template_1.settings.index.number_of_shards", clusterState);
        assertEquals("1", numberOfShards);
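
Note (not part of the patch above): this change adds `ElasticsearchCluster#nextNodeToNextVersion()` alongside `goToNextVersion()`, but only the whole-cluster variant is exercised by qa/full-cluster-restart. The sketch below shows how a rolling-upgrade style build script could drive the per-node variant through the `testclusters` DSL. It is illustrative only: the cluster name, task names, the `tests.rest.suite` system property and the availability of `bwcVersion` in scope are assumptions, not taken from this change.

```groovy
// Sketch only: assumes the 'elasticsearch.testclusters' plugin is applied and that
// bwcVersion and RestTestRunnerTask are available as in qa/full-cluster-restart/build.gradle.
import org.elasticsearch.gradle.testclusters.RestTestRunnerTask

testClusters {
  "rolling-upgrade" {
    // old version first, then the version to upgrade to (same contract as setVersions in this patch)
    versions = [ bwcVersion.toString(), project.version ]
    numberOfNodes = 3
    setting 'http.content_type.required', 'true'
  }
}

tasks.register("oldClusterTest", RestTestRunnerTask) {
  useCluster testClusters."rolling-upgrade"
  systemProperty 'tests.rest.suite', 'old_cluster'   // hypothetical property for the test side
}

// Upgrade one node per task: nextNodeToNextVersion() restarts only the next node on the
// following version in the list and throws once every node has already been upgraded.
(1..3).each { upgradedNodes ->
  tasks.register("upgradedNodeTest#${upgradedNodes}", RestTestRunnerTask) {
    useCluster testClusters."rolling-upgrade"
    dependsOn(upgradedNodes == 1 ? "oldClusterTest" : "upgradedNodeTest#${upgradedNodes - 1}")
    doFirst {
      testClusters."rolling-upgrade".nextNodeToNextVersion()
    }
  }
}
```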