diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java
index 9e6984fd45007..a94ebacd460a5 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java
@@ -404,11 +404,6 @@ public void upgradeAllNodesAndPluginsToNextVersion(List> p
         writeUnicastHostsFiles();
     }
 
-    public void fullRestart() {
-        stop(false);
-        start();
-    }
-
     public void nextNodeToNextVersion() {
         OpenSearchNode node = upgradeNodeToNextVersion();
         node.start();
diff --git a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java
index 629e325427162..a67c5581cba92 100644
--- a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java
+++ b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java
@@ -1335,45 +1335,6 @@ public void testTurnOffTranslogRetentionAfterUpgraded() throws Exception {
         }
     }
 
-    public void testRecoveryWithTranslogRetentionDisabled() throws Exception {
-        if (isRunningAgainstOldCluster()) {
-            final Settings.Builder settings = Settings.builder()
-                .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
-                .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1);
-            if (minimumNodeVersion().before(Version.V_2_0_0)) {
-                settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
-            }
-            if (randomBoolean()) {
-                settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), "-1");
-            }
-            if (randomBoolean()) {
-                settings.put(IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING.getKey(), "1kb");
-            }
-            createIndex(index, settings.build());
-            ensureGreen(index);
-            int numDocs = randomIntBetween(0, 100);
-            for (int i = 0; i < numDocs; i++) {
-                indexDocument(Integer.toString(i));
-                if (rarely()) {
-                    flush(index, randomBoolean());
-                }
-            }
-            client().performRequest(new Request("POST", "/" + index + "/_refresh"));
-            if (randomBoolean()) {
-                ensurePeerRecoveryRetentionLeasesRenewedAndSynced(index);
-            }
-            if (randomBoolean()) {
-                flush(index, randomBoolean());
-            } else if (randomBoolean()) {
-                syncedFlush(index, randomBoolean());
-            }
-            saveInfoDocument("doc_count", Integer.toString(numDocs));
-        }
-        ensureGreen(index);
-        final int numDocs = Integer.parseInt(loadInfoDocument("doc_count"));
-        assertTotalHits(numDocs, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))));
-    }
-
     public void testResize() throws Exception {
         int numDocs;
         if (isRunningAgainstOldCluster()) {
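For context on what coverage is dropped here: the deleted BWC test created an index with translog retention effectively disabled, indexed a random number of documents, restarted into the new cluster, and asserted the document count. A minimal sketch of the kind of settings it randomized, built only from the setting keys visible in the removed code (the fixed values below are illustrative, not the test's exact behavior):

```java
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
import org.opensearch.index.IndexSettings;

final class TranslogRetentionDisabledSettings {
    private TranslogRetentionDisabledSettings() {}

    /** Illustrative only: one shard/replica, size-based translog retention disabled, small translog generations. */
    static Settings build() {
        return Settings.builder()
            .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
            .put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), "-1") // "-1" disables size-based retention
            .put(IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING.getKey(), "1kb")
            .build();
    }
}
```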
diff --git a/qa/translog-policy/build.gradle b/qa/translog-policy/build.gradle
deleted file mode 100644
index 5ef7774045e16..0000000000000
--- a/qa/translog-policy/build.gradle
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- *
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import org.opensearch.gradle.Version
-import org.opensearch.gradle.info.BuildParams
-import org.opensearch.gradle.testclusters.StandaloneRestIntegTestTask
-
-apply plugin: 'opensearch.testclusters'
-apply plugin: 'opensearch.standalone-test'
-apply from : "$rootDir/gradle/bwc-test.gradle"
-
-for (Version bwcVersion : BuildParams.bwcVersions.indexCompatible) {
-  if (bwcVersion.before('6.3.0')) {
-    // explicitly running restart on the current node does not work in step 2
-    // below when plugins are installed, which is the case for some plugins
-    // prior to 6.3.0
-    continue
-  }
-  String baseName = "v${bwcVersion}"
-
-  testClusters {
-    "${baseName}" {
-      versions = [bwcVersion.toString(), project.version]
-      numberOfNodes = 2
-      setting 'http.content_type.required', 'true'
-    }
-  }
-
-  tasks.register("${baseName}#Step1OldClusterTest", StandaloneRestIntegTestTask) {
-    useCluster testClusters."${baseName}"
-    mustRunAfter(precommit)
-    systemProperty 'tests.test_step', 'step1'
-    systemProperty 'tests.is_old_cluster', 'true'
-  }
-
-  tasks.register("${baseName}#Step2OldClusterTest", StandaloneRestIntegTestTask) {
-    useCluster testClusters."${baseName}"
-    dependsOn "${baseName}#Step1OldClusterTest"
-    doFirst {
-      testClusters."${baseName}".fullRestart()
-    }
-    systemProperty 'tests.test_step', 'step2'
-    systemProperty 'tests.is_old_cluster', 'true'
-  }
-
-  tasks.register("${baseName}#Step3NewClusterTest", StandaloneRestIntegTestTask) {
-    useCluster testClusters."${baseName}"
-    dependsOn "${baseName}#Step2OldClusterTest"
-    doFirst {
-      testClusters."${baseName}".goToNextVersion()
-    }
-    systemProperty 'tests.test_step', 'step3'
-    systemProperty 'tests.is_old_cluster', 'false'
-  }
-
-  tasks.register("${baseName}#Step4NewClusterTest", StandaloneRestIntegTestTask) {
-    useCluster testClusters."${baseName}"
-    dependsOn "${baseName}#Step3NewClusterTest"
-    doFirst {
-      testClusters."${baseName}".fullRestart()
-    }
-    systemProperty 'tests.test_step', 'step4'
-    systemProperty 'tests.is_old_cluster', 'false'
-  }
-
-  String oldVersion = bwcVersion.toString().minus("-SNAPSHOT")
-  tasks.matching { it.name.startsWith(baseName) && it.name.endsWith("ClusterTest") }.configureEach {
-    it.systemProperty 'tests.old_cluster_version', oldVersion
-    it.nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
-    it.nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
-  }
-
-  tasks.register(bwcTaskName(bwcVersion)) {
-    dependsOn tasks.named("${baseName}#Step4NewClusterTest")
-  }
-}
-
-configurations {
-  testArtifacts.extendsFrom testRuntime
-}
-
-task testJar(type: Jar) {
-  archiveAppendix = 'test'
-  from sourceSets.test.output
-}
-
-artifacts {
-  testArtifacts testJar
-}
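The deleted build file was the only user of the `fullRestart()` helper removed from `OpenSearchCluster` above: its step 2 and step 4 tasks called `testClusters."${baseName}".fullRestart()` before re-running the suite. If a future build needs that behavior, a sketch of an equivalent helper, assuming `stop(boolean)` and `start()` remain public as the removed method's body implies:

```java
import org.opensearch.gradle.testclusters.OpenSearchCluster;

final class ClusterRestarts {
    private ClusterRestarts() {}

    /** Hypothetical stand-in for the removed OpenSearchCluster#fullRestart(): stop all nodes, then start again. */
    static void fullRestart(OpenSearchCluster cluster) {
        cluster.stop(false); // assumption: stop(boolean tailLogs) is still part of the public API
        cluster.start();
    }
}
```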
diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java
index 1edd0c67c3317..2c54b726348de 100644
--- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java
+++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java
@@ -103,7 +103,6 @@
 import org.opensearch.index.seqno.SequenceNumbers;
 import org.opensearch.index.shard.OpenSearchMergePolicy;
 import org.opensearch.index.shard.ShardId;
-import org.opensearch.index.store.Store;
 import org.opensearch.index.translog.Translog;
 import org.opensearch.index.translog.TranslogConfig;
 import org.opensearch.index.translog.TranslogCorruptedException;
@@ -115,7 +114,6 @@
 
 import java.io.Closeable;
 import java.io.IOException;
-import java.nio.file.Path;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -252,7 +250,7 @@ public InternalEngine(EngineConfig engineConfig) {
         mergeScheduler = scheduler = new EngineMergeScheduler(engineConfig.getShardId(), engineConfig.getIndexSettings());
         throttle = new IndexThrottle();
         try {
-            trimUnsafeCommits(engineConfig);
+            store.trimUnsafeCommits(engineConfig.getTranslogConfig().getTranslogPath());
             translog = openTranslog(engineConfig, translogDeletionPolicy, engineConfig.getGlobalCheckpointSupplier(), seqNo -> {
                 final LocalCheckpointTracker tracker = getLocalCheckpointTracker();
                 assert tracker != null || getTranslog().isOpen() == false;
@@ -2955,15 +2953,6 @@ private boolean assertMaxSeqNoOfUpdatesIsAdvanced(Term id, long seqNo, boolean a
         return true;
     }
 
-    private static void trimUnsafeCommits(EngineConfig engineConfig) throws IOException {
-        final Store store = engineConfig.getStore();
-        final String translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(Translog.TRANSLOG_UUID_KEY);
-        final Path translogPath = engineConfig.getTranslogConfig().getTranslogPath();
-        final long globalCheckpoint = Translog.readGlobalCheckpoint(translogPath, translogUUID);
-        final long minRetainedTranslogGen = Translog.readMinTranslogGeneration(translogPath, translogUUID);
-        store.trimUnsafeCommits(globalCheckpoint, minRetainedTranslogGen, engineConfig.getIndexSettings().getIndexVersionCreated());
-    }
-
     /**
      * Restores the live version map and local checkpoint of this engine using documents (including soft-deleted)
      * after the local checkpoint in the safe commit. This step ensures the live version map and checkpoint tracker
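The engine constructor now passes the translog path straight to the store instead of pre-computing the recovery inputs in the private helper deleted above. A rough sketch of the equivalence, restricted to calls that appear in this diff (the class and method names below are illustrative wrappers, not OpenSearch API):

```java
import java.io.IOException;
import java.nio.file.Path;

import org.opensearch.index.engine.EngineConfig;
import org.opensearch.index.store.Store;

final class TrimUnsafeCommitsSketch {
    private TrimUnsafeCommitsSketch() {}

    /**
     * What the constructor effectively does after this change. Previously the engine looked up the
     * translog UUID from the last commit and read the global checkpoint and min retained generation
     * itself; Store#trimUnsafeCommits(Path) now resolves the global checkpoint internally from the
     * translog directory, so only the path needs to be handed over.
     */
    static void trimBeforeOpeningTranslog(EngineConfig config) throws IOException {
        final Store store = config.getStore();
        final Path translogPath = config.getTranslogConfig().getTranslogPath();
        store.trimUnsafeCommits(translogPath);
    }
}
```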
diff --git a/server/src/main/java/org/opensearch/index/store/Store.java b/server/src/main/java/org/opensearch/index/store/Store.java
index 86f007c61a684..2b47c5845a394 100644
--- a/server/src/main/java/org/opensearch/index/store/Store.java
+++ b/server/src/main/java/org/opensearch/index/store/Store.java
@@ -1597,27 +1597,16 @@ public void ensureIndexHasHistoryUUID() throws IOException {
      * commit on the replica will cause exception as the new last commit c3 will have recovery_translog_gen=1. The recovery
      * translog generation of a commit is calculated based on the current local checkpoint. The local checkpoint of c3 is 1
      * while the local checkpoint of c2 is 2.
-     * <p>
-     * 3. Commit without translog can be used in recovery. An old index, which was created before multiple-commits is introduced
-     * (v6.2), may not have a safe commit. If that index has a snapshotted commit without translog and an unsafe commit,
-     * the policy can consider the snapshotted commit as a safe commit for recovery even the commit does not have translog.
      */
-    public void trimUnsafeCommits(
-        final long lastSyncedGlobalCheckpoint,
-        final long minRetainedTranslogGen,
-        final org.opensearch.Version indexVersionCreated
-    ) throws IOException {
+    public void trimUnsafeCommits(final Path translogPath) throws IOException {
         metadataLock.writeLock().lock();
         try {
             final List<IndexCommit> existingCommits = DirectoryReader.listCommits(directory);
-            if (existingCommits.isEmpty()) {
-                throw new IllegalArgumentException("No index found to trim");
-            }
-            final IndexCommit lastIndexCommitCommit = existingCommits.get(existingCommits.size() - 1);
-            final String translogUUID = lastIndexCommitCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY);
-            final IndexCommit startingIndexCommit;
-            // TODO: Asserts the starting commit is a safe commit once peer-recovery sets global checkpoint.
-            startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, lastSyncedGlobalCheckpoint);
+            assert existingCommits.isEmpty() == false : "No index found to trim";
+            final IndexCommit lastIndexCommit = existingCommits.get(existingCommits.size() - 1);
+            final String translogUUID = lastIndexCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY);
+            final long lastSyncedGlobalCheckpoint = Translog.readGlobalCheckpoint(translogPath, translogUUID);
+            final IndexCommit startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, lastSyncedGlobalCheckpoint);
 
             if (translogUUID.equals(startingIndexCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY)) == false) {
                 throw new IllegalStateException(
@@ -1628,7 +1617,7 @@ public void trimUnsafeCommits(
                         + "]"
                 );
             }
-            if (startingIndexCommit.equals(lastIndexCommitCommit) == false) {
+            if (startingIndexCommit.equals(lastIndexCommit) == false) {
                 try (IndexWriter writer = newAppendingIndexWriter(directory, startingIndexCommit)) {
                     // this achieves two things:
                     // - by committing a new commit based on the starting commit, it make sure the starting commit will be opened
diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java
index 359f73ff3d555..0bd47902c89ed 100644
--- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java
+++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java
@@ -6094,7 +6094,7 @@ public void testTrimUnsafeCommits() throws Exception {
             minTranslogGen = engine.getTranslog().getMinFileGeneration();
         }
 
-        store.trimUnsafeCommits(globalCheckpoint.get(), minTranslogGen, config.getIndexSettings().getIndexVersionCreated());
+        store.trimUnsafeCommits(config.getTranslogConfig().getTranslogPath());
         long safeMaxSeqNo = commitMaxSeqNo.stream()
             .filter(s -> s <= globalCheckpoint.get())
            .reduce((s1, s2) -> s2) // get the last one.
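Both the production and test call sites above now lean entirely on `CombinedDeletionPolicy.findSafeCommitPoint` to choose the commit to keep. As a mental model only (this is not the OpenSearch implementation), the selection can be pictured as keeping the newest commit whose max sequence number is at or below the last synced global checkpoint:

```java
import java.io.IOException;
import java.util.List;

import org.apache.lucene.index.IndexCommit;
import org.opensearch.index.seqno.SequenceNumbers;

final class SafeCommitSketch {
    private SafeCommitSketch() {}

    /**
     * Illustrative restatement of the "safe commit" idea from the Store javadoc: any commit whose
     * max_seq_no exceeds the global checkpoint may hold operations that were never acknowledged, so
     * the newest commit at or below the checkpoint is the one recovery should start from.
     * Expects a non-empty list ordered oldest to newest, as DirectoryReader.listCommits returns it.
     */
    static IndexCommit pickSafeCommit(List<IndexCommit> commits, long globalCheckpoint) throws IOException {
        IndexCommit safe = commits.get(0); // fall back to the oldest commit if none qualifies
        for (IndexCommit commit : commits) {
            final long maxSeqNo = Long.parseLong(commit.getUserData().get(SequenceNumbers.MAX_SEQ_NO));
            if (maxSeqNo <= globalCheckpoint) {
                safe = commit; // later iterations overwrite with newer qualifying commits
            }
        }
        return safe;
    }
}
```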
diff --git a/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java b/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java
index 9603b63337842..27369e79e5dee 100644
--- a/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java
@@ -968,10 +968,7 @@ protected static void createIndex(String name, Settings settings, String mapping
         entity += "}";
         if (settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) == false) {
             expectSoftDeletesWarning(request, name);
-        } else if (settings.hasValue(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey())
-            || settings.hasValue(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey())) {
-            expectTranslogRetentionWarning(request);
-        }
+        }
         request.setJsonEntity(entity);
         client().performRequest(request);
     }
@@ -1025,21 +1022,6 @@ protected static void expectSoftDeletesWarning(Request request, String indexName
         }
     }
 
-    protected static void expectTranslogRetentionWarning(Request request) {
-        final List<String> expectedWarnings = Collections.singletonList(
-            "Translog retention settings [index.translog.retention.age] "
-                + "and [index.translog.retention.size] are deprecated and effectively ignored. They will be removed in a future version."
-        );
-        final Builder requestOptions = RequestOptions.DEFAULT.toBuilder();
-        if (nodeVersions.stream().allMatch(version -> version.onOrAfter(LegacyESVersion.V_7_7_0))) {
-            requestOptions.setWarningsHandler(warnings -> warnings.equals(expectedWarnings) == false);
-            request.setOptions(requestOptions);
-        } else if (nodeVersions.stream().anyMatch(version -> version.onOrAfter(LegacyESVersion.V_7_7_0))) {
-            requestOptions.setWarningsHandler(warnings -> warnings.isEmpty() == false && warnings.equals(expectedWarnings) == false);
-            request.setOptions(requestOptions);
-        }
-    }
-
     protected static Map<String, Object> getIndexSettings(String index) throws IOException {
         Request request = new Request("GET", "/" + index + "/_settings");
         request.addParameter("flat_settings", "true");
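The removed `expectTranslogRetentionWarning` followed the same pattern as the `expectSoftDeletesWarning` helper that stays behind: register a warnings handler that fails the request unless the deprecation warnings match an expected list. A hedged sketch of that pattern with the low-level REST client (the class and method names below are illustrative):

```java
import java.util.List;

import org.opensearch.client.Request;
import org.opensearch.client.RequestOptions;

final class WarningExpectations {
    private WarningExpectations() {}

    /**
     * Sketch of the RequestOptions/WarningsHandler pattern used by the removed helper: the handler
     * returns true when the received warnings are not the expected ones, which fails the request.
     */
    static void expectExactWarnings(Request request, List<String> expectedWarnings) {
        final RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
        options.setWarningsHandler(warnings -> warnings.equals(expectedWarnings) == false);
        request.setOptions(options);
    }
}
```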