From ec23d1f2c72d656ca2b78dfbb8335c588bd0686a Mon Sep 17 00:00:00 2001 From: Ankit Kala <ankikala@amazon.com> Date: Tue, 28 Jun 2022 13:19:53 +0530 Subject: [PATCH 1/7] Add custom build.sh to publish the plugin zip (#434) Signed-off-by: Ankit Kala <ankikala@amazon.com> --- scripts/build.sh | 82 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 82 insertions(+) create mode 100755 scripts/build.sh diff --git a/scripts/build.sh b/scripts/build.sh new file mode 100755 index 00000000..bd6ab31a --- /dev/null +++ b/scripts/build.sh @@ -0,0 +1,82 @@ +#!/bin/bash + +# +# Copyright OpenSearch Contributors +# SPDX-License-Identifier: Apache-2.0 +# + +set -ex + +function usage() { + echo "Usage: $0 [args]" + echo "" + echo "Arguments:" + echo -e "-v VERSION\t[Required] OpenSearch version." + echo -e "-q QUALIFIER\t[Optional] Version qualifier." + echo -e "-s SNAPSHOT\t[Optional] Build a snapshot, default is 'false'." + echo -e "-p PLATFORM\t[Optional] Platform, ignored." + echo -e "-a ARCHITECTURE\t[Optional] Build architecture, ignored." + echo -e "-o OUTPUT\t[Optional] Output path, default is 'artifacts'." + echo -e "-h help" +} + +while getopts ":h:v:q:s:o:p:a:" arg; do + case $arg in + h) + usage + exit 1 + ;; + v) + VERSION=$OPTARG + ;; + q) + QUALIFIER=$OPTARG + ;; + s) + SNAPSHOT=$OPTARG + ;; + o) + OUTPUT=$OPTARG + ;; + p) + PLATFORM=$OPTARG + ;; + a) + ARCHITECTURE=$OPTARG + ;; + :) + echo "Error: -${OPTARG} requires an argument" + usage + exit 1 + ;; + ?) + echo "Invalid option: -${arg}" + exit 1 + ;; + esac +done + +if [ -z "$VERSION" ]; then + echo "Error: You must specify the OpenSearch version" + usage + exit 1 +fi + +[[ ! -z "$QUALIFIER" ]] && VERSION=$VERSION-$QUALIFIER +[[ "$SNAPSHOT" == "true" ]] && VERSION=$VERSION-SNAPSHOT +[ -z "$OUTPUT" ] && OUTPUT=artifacts + +mkdir -p $OUTPUT + +./gradlew assemble --no-daemon --refresh-dependencies -DskipTests=true -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER + +zipPath=$(find . -path \*build/distributions/*.zip) +distributions="$(dirname "${zipPath}")" + +echo "COPY ${distributions}/*.zip" +mkdir -p $OUTPUT/plugins +cp ${distributions}/*.zip ./$OUTPUT/plugins + +./gradlew publishPluginZipPublicationToZipStagingRepository -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER +mkdir -p $OUTPUT/maven/org/opensearch +cp -r ./build/local-staging-repo/org/opensearch/. $OUTPUT/maven/org/opensearch \ No newline at end of file From 6130d6feda5075a695bc384b2702a76d4db383a6 Mon Sep 17 00:00:00 2001 From: Gaurav Bafna <85113518+gbbafna@users.noreply.github.com> Date: Mon, 4 Jul 2022 12:22:33 +0530 Subject: [PATCH 2/7] Correctly updating the followerCheckpoint in stats api (#438) Summary : We need to update followerCheckpoint after writing to the follower index. 
Currently, we are not waiting for the writes and updating it with soon-to-be stale values Signed-off-by: Gaurav Bafna <gbbafna@amazon.com> --- .../task/shard/ShardReplicationTask.kt | 2 +- .../task/shard/TranslogSequencer.kt | 5 ++++ .../integ/rest/StartReplicationIT.kt | 1 + .../task/shard/TranslogSequencerTests.kt | 28 +++++++++++++------ 4 files changed, 26 insertions(+), 10 deletions(-) diff --git a/src/main/kotlin/org/opensearch/replication/task/shard/ShardReplicationTask.kt b/src/main/kotlin/org/opensearch/replication/task/shard/ShardReplicationTask.kt index a7418917..44493bc7 100644 --- a/src/main/kotlin/org/opensearch/replication/task/shard/ShardReplicationTask.kt +++ b/src/main/kotlin/org/opensearch/replication/task/shard/ShardReplicationTask.kt @@ -217,6 +217,7 @@ class ShardReplicationTask(id: Long, type: String, action: String, description: TaskId(clusterService.nodeName, id), client, indexShard.localCheckpoint, followerClusterStats) val changeTracker = ShardReplicationChangesTracker(indexShard, replicationSettings) + followerClusterStats.stats[followerShardId]!!.followerCheckpoint = indexShard.localCheckpoint coroutineScope { while (isActive) { rateLimiter.acquire() @@ -273,7 +274,6 @@ class ShardReplicationTask(id: Long, type: String, action: String, description: //hence renew retention lease with lastSyncedGlobalCheckpoint + 1 so that any shard that picks up shard replication task has data until then. try { retentionLeaseHelper.renewRetentionLease(leaderShardId, indexShard.lastSyncedGlobalCheckpoint + 1, followerShardId) - followerClusterStats.stats[followerShardId]!!.followerCheckpoint = indexShard.lastSyncedGlobalCheckpoint lastLeaseRenewalMillis = System.currentTimeMillis() } catch (ex: Exception) { when (ex) { diff --git a/src/main/kotlin/org/opensearch/replication/task/shard/TranslogSequencer.kt b/src/main/kotlin/org/opensearch/replication/task/shard/TranslogSequencer.kt index be5fe89c..38b625bf 100644 --- a/src/main/kotlin/org/opensearch/replication/task/shard/TranslogSequencer.kt +++ b/src/main/kotlin/org/opensearch/replication/task/shard/TranslogSequencer.kt @@ -28,6 +28,7 @@ import org.opensearch.client.Client import org.opensearch.common.logging.Loggers import org.opensearch.index.shard.ShardId import org.opensearch.index.translog.Translog +import org.opensearch.replication.util.indicesService import org.opensearch.tasks.TaskId import java.util.ArrayList import java.util.concurrent.ConcurrentHashMap @@ -55,6 +56,9 @@ class TranslogSequencer(scope: CoroutineScope, private val replicationMetadata: private val log = Loggers.getLogger(javaClass, followerShardId)!! private val completed = CompletableDeferred<Unit>() + val followerIndexService = indicesService.indexServiceSafe(followerShardId.index) + val indexShard = followerIndexService.getShard(followerShardId.id) + private val sequencer = scope.actor<Unit>(capacity = Channel.UNLIMITED) { // Exceptions thrown here will mark the channel as failed and the next attempt to send to the channel will // raise the same exception. See [SendChannel.close] method for details. 
@@ -88,6 +92,7 @@ class TranslogSequencer(scope: CoroutineScope, private val replicationMetadata: val tookInNanos = System.nanoTime() - relativeStartNanos followerClusterStats.stats[followerShardId]!!.totalWriteTime.addAndGet(TimeUnit.NANOSECONDS.toMillis(tookInNanos)) followerClusterStats.stats[followerShardId]!!.opsWritten.addAndGet(replayRequest.changes.size.toLong()) + followerClusterStats.stats[followerShardId]!!.followerCheckpoint = indexShard.localCheckpoint } highWatermark = next.changes.lastOrNull()?.seqNo() ?: highWatermark } diff --git a/src/test/kotlin/org/opensearch/replication/integ/rest/StartReplicationIT.kt b/src/test/kotlin/org/opensearch/replication/integ/rest/StartReplicationIT.kt index e9bc717e..4c50bf3a 100644 --- a/src/test/kotlin/org/opensearch/replication/integ/rest/StartReplicationIT.kt +++ b/src/test/kotlin/org/opensearch/replication/integ/rest/StartReplicationIT.kt @@ -1091,6 +1091,7 @@ class StartReplicationIT: MultiClusterRestTestCase() { assertThat(stats.getValue("operations_read").toString()).isEqualTo("50") assertThat(stats.getValue("failed_read_requests").toString()).isEqualTo("0") assertThat(stats.getValue("failed_write_requests").toString()).isEqualTo("0") + assertThat(stats.getValue("follower_checkpoint").toString()).isEqualTo((docCount-1).toString()) assertThat(stats.containsKey("index_stats")) assertThat(stats.size).isEqualTo(16) diff --git a/src/test/kotlin/org/opensearch/replication/task/shard/TranslogSequencerTests.kt b/src/test/kotlin/org/opensearch/replication/task/shard/TranslogSequencerTests.kt index ed5afb06..ac377687 100644 --- a/src/test/kotlin/org/opensearch/replication/task/shard/TranslogSequencerTests.kt +++ b/src/test/kotlin/org/opensearch/replication/task/shard/TranslogSequencerTests.kt @@ -11,32 +11,37 @@ package org.opensearch.replication.task.shard -import org.opensearch.replication.action.changes.GetChangesResponse -import org.opensearch.replication.action.replay.ReplayChangesAction -import org.opensearch.replication.action.replay.ReplayChangesRequest -import org.opensearch.replication.action.replay.ReplayChangesResponse -import org.opensearch.replication.metadata.ReplicationOverallState -import org.opensearch.replication.metadata.store.ReplicationContext -import org.opensearch.replication.metadata.store.ReplicationMetadata -import org.opensearch.replication.metadata.store.ReplicationStoreMetadataType import kotlinx.coroutines.ExperimentalCoroutinesApi import kotlinx.coroutines.ObsoleteCoroutinesApi import kotlinx.coroutines.test.runBlockingTest import org.assertj.core.api.Assertions.assertThat +import org.mockito.Mockito import org.opensearch.action.ActionListener import org.opensearch.action.ActionRequest import org.opensearch.action.ActionResponse import org.opensearch.action.ActionType import org.opensearch.action.support.replication.ReplicationResponse.ShardInfo import org.opensearch.common.settings.Settings +import org.opensearch.index.IndexService +import org.opensearch.index.shard.IndexShard import org.opensearch.index.shard.ShardId import org.opensearch.index.translog.Translog +import org.opensearch.indices.IndicesService +import org.opensearch.replication.action.changes.GetChangesResponse +import org.opensearch.replication.action.replay.ReplayChangesAction +import org.opensearch.replication.action.replay.ReplayChangesRequest +import org.opensearch.replication.action.replay.ReplayChangesResponse +import org.opensearch.replication.metadata.ReplicationOverallState +import 
org.opensearch.replication.metadata.store.ReplicationContext +import org.opensearch.replication.metadata.store.ReplicationMetadata +import org.opensearch.replication.metadata.store.ReplicationStoreMetadataType +import org.opensearch.replication.util.indicesService import org.opensearch.tasks.TaskId.EMPTY_TASK_ID import org.opensearch.test.OpenSearchTestCase -import org.opensearch.test.OpenSearchTestCase.randomList import org.opensearch.test.client.NoOpClient import java.util.Locale + @ObsoleteCoroutinesApi class TranslogSequencerTests : OpenSearchTestCase() { @@ -83,6 +88,11 @@ class TranslogSequencerTests : OpenSearchTestCase() { val stats = FollowerClusterStats() stats.stats[followerShardId] = FollowerShardMetric() val startSeqNo = randomNonNegativeLong() + indicesService = Mockito.mock(IndicesService::class.java) + val followerIndexService = Mockito.mock(IndexService::class.java) + val indexShard = Mockito.mock(IndexShard::class.java) + Mockito.`when`(indicesService.indexServiceSafe(followerShardId.index)).thenReturn(followerIndexService) + Mockito.`when`(followerIndexService.getShard(followerShardId.id)).thenReturn(indexShard) val sequencer = TranslogSequencer(this, replicationMetadata, followerShardId, leaderAlias, leaderIndex, EMPTY_TASK_ID, client, startSeqNo, stats) From 0008917266f1cce64236726842662058414b089f Mon Sep 17 00:00:00 2001 From: pgodithi <pgodithi@amazon.com> Date: Mon, 11 Jul 2022 15:55:17 -0400 Subject: [PATCH 3/7] Version increment automation Signed-off-by: pgodithi <pgodithi@amazon.com> --- build.gradle | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 4ff5e3fe..bc670b6c 100644 --- a/build.gradle +++ b/build.gradle @@ -894,4 +894,20 @@ publishing { } } } -} \ No newline at end of file +} + +// versionIncrement: Task to auto increment to the next development iteration +task versionIncrement { + onlyIf { System.getProperty('newVersion') } + doLast { + ext.newVersion = System.getProperty('newVersion') + println "Setting version to ${newVersion}." + // String tokenization to support -SNAPSHOT + ant.replaceregexp(match: opensearch_version.tokenize('-')[0], replace: newVersion.tokenize('-')[0], flags:'g', byline:true) { + fileset(dir: projectDir) { + // Include the required files that needs to be updated with new Version + include(name: "build.gradle") + } + } + } +} From 94943fd711d268c35db68db26c94984a8bf94c1f Mon Sep 17 00:00:00 2001 From: pgodithi <pgodithi@amazon.com> Date: Tue, 12 Jul 2022 18:46:03 -0400 Subject: [PATCH 4/7] Version increment automation: task rename updateVersion Signed-off-by: pgodithi <pgodithi@amazon.com> --- build.gradle | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/build.gradle b/build.gradle index bc670b6c..cd6045da 100644 --- a/build.gradle +++ b/build.gradle @@ -896,18 +896,13 @@ publishing { } } -// versionIncrement: Task to auto increment to the next development iteration -task versionIncrement { +// updateVersion: Task to auto increment to the next development iteration +task updateVersion { onlyIf { System.getProperty('newVersion') } doLast { ext.newVersion = System.getProperty('newVersion') println "Setting version to ${newVersion}." 
// String tokenization to support -SNAPSHOT - ant.replaceregexp(match: opensearch_version.tokenize('-')[0], replace: newVersion.tokenize('-')[0], flags:'g', byline:true) { - fileset(dir: projectDir) { - // Include the required files that needs to be updated with new Version - include(name: "build.gradle") - } - } + ant.replaceregexp(file:'build.gradle', match: '"opensearch.version", "\\d.*"', replace: '"opensearch.version", "' + newVersion.tokenize('-')[0] + '-SNAPSHOT"', flags:'g', byline:true) } } From 08ad60bf28a3b9fa226871e97048a9cf496138c8 Mon Sep 17 00:00:00 2001 From: Rishikesh Pasham <62345295+Rishikesh1159@users.noreply.github.com> Date: Mon, 18 Jul 2022 09:32:04 +0000 Subject: [PATCH 5/7] Changing leader_cluster to leader_alias in readme file (#453) Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ee66e368..a404690c 100644 --- a/README.md +++ b/README.md @@ -106,7 +106,7 @@ curl -XPOST "http://${LEADER}/leader-01/_doc/1" -H 'Content-Type: application/js ```bash curl -XPUT "http://${FOLLOWER}/_plugins/_replication/follower-01/_start?pretty" \ -H 'Content-type: application/json' \ --d'{"leader_cluster":"leader-cluster", "leader_index": "leader-01"}' +-d'{"leader_alias":"leader-cluster", "leader_index": "leader-01"}' ``` ### Step 5: Make changes to data on leader index From 27898e2f166416992b2cf852903822352762c54e Mon Sep 17 00:00:00 2001 From: Ankit Kala <ankikala@amazon.com> Date: Tue, 19 Jul 2022 09:59:05 +0530 Subject: [PATCH 6/7] Use the published zip for security plugin (#455) Signed-off-by: Ankit Kala <ankikala@amazon.com> --- .github/workflows/security-tests.yml | 12 ------------ build.gradle | 18 +++++++++++++++++- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/.github/workflows/security-tests.yml b/.github/workflows/security-tests.yml index ee435d15..9b7bcb3f 100644 --- a/.github/workflows/security-tests.yml +++ b/.github/workflows/security-tests.yml @@ -22,18 +22,6 @@ jobs: # This step uses the checkout Github action: https://github.com/actions/checkout - name: Checkout Branch uses: actions/checkout@v2 - # Security plugin dependency - - name: Checkout security - uses: actions/checkout@v2 - with: - repository: 'opensearch-project/security' - path: security - ref: 'main' - - name: Build security - working-directory: ./security - run: | - ./gradlew clean build -Dbuild.snapshot=false -x test - cp build/distributions/opensearch-security-*.zip ../src/test/resources/security/plugin/opensearch-security.zip - name: Build and run Replication tests run: | ls -al src/test/resources/security/plugin diff --git a/build.gradle b/build.gradle index 4ff5e3fe..fbd17a12 100644 --- a/build.gradle +++ b/build.gradle @@ -56,6 +56,14 @@ buildscript { common_utils_version = System.getProperty("common_utils.version", opensearch_build) kotlin_version = System.getProperty("kotlin.version", "1.6.0") + // For fetching security zip from Maven. 
+ // https://ci.opensearch.org/ci/dbc/distribution-build-opensearch/2.1.0/latest/linux/x64/tar/builds/opensearch/plugins/opensearch-security-2.1.0.0.zip + opensearch_no_snapshot = opensearch_version.replace("-SNAPSHOT","") + security_no_snapshot = opensearch_build.replace("-SNAPSHOT","") + security_plugin_path = "build/dependencies/security" + security_plugin_download_url = 'https://ci.opensearch.org/ci/dbc/distribution-build-opensearch/' + opensearch_no_snapshot + + '/latest/linux/x64/tar/builds/opensearch/plugins/opensearch-security-' + security_no_snapshot + '.zip' + } repositories { @@ -63,6 +71,7 @@ buildscript { mavenCentral() maven { url "https://aws.oss.sonatype.org/content/repositories/snapshots" } maven { url "https://plugins.gradle.org/m2/" } + maven { url "https://d1nvenhzbhpy0q.cloudfront.net/snapshots/lucene/" } } dependencies { @@ -209,7 +218,14 @@ def securityPluginFile = new Callable<RegularFile>() { return new RegularFile() { @Override File getAsFile() { - return fileTree("$projectDir/src/test/resources/security/plugin/opensearch-security.zip").getSingleFile() + if (new File("$project.rootDir/$security_plugin_path").exists()) { + project.delete(files("$project.rootDir/$security_plugin_path")) + } + project.mkdir security_plugin_path + ant.get(src: security_plugin_download_url, + dest: security_plugin_path, + httpusecaches: false) + return fileTree(security_plugin_path).getSingleFile() } } } From db5029a1ce517d62141efe44b1a6ef624f3f143d Mon Sep 17 00:00:00 2001 From: Prudhvi Godithi <pgodithi@amazon.com> Date: Wed, 27 Jul 2022 03:28:09 -0400 Subject: [PATCH 7/7] Staging for version increment automation (#449) * Version increment automation Signed-off-by: pgodithi <pgodithi@amazon.com> * Version increment automation: task rename updateVersion Signed-off-by: pgodithi <pgodithi@amazon.com> --- build.gradle | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index fbd17a12..ea688661 100644 --- a/build.gradle +++ b/build.gradle @@ -910,4 +910,15 @@ publishing { } } } -} \ No newline at end of file +} + +// updateVersion: Task to auto increment to the next development iteration +task updateVersion { + onlyIf { System.getProperty('newVersion') } + doLast { + ext.newVersion = System.getProperty('newVersion') + println "Setting version to ${newVersion}." + // String tokenization to support -SNAPSHOT + ant.replaceregexp(file:'build.gradle', match: '"opensearch.version", "\\d.*"', replace: '"opensearch.version", "' + newVersion.tokenize('-')[0] + '-SNAPSHOT"', flags:'g', byline:true) + } +}
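
Post-series usage notes (editorial sketches; not part of the patches above).

The scripts/build.sh added in PATCH 1/7 wires the plugin into the OpenSearch distribution build: it assembles the plugin zip, copies it under <output>/plugins, and stages the maven artifacts under <output>/maven. A minimal invocation, assuming a 2.x checkout and an illustrative version number, could look like:

    # build a snapshot of the plugin zip and maven artifacts into ./artifacts
    ./scripts/build.sh -v 2.1.0 -s true -o artifacts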
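
The version bump task (introduced as versionIncrement in PATCH 3/7, renamed updateVersion in PATCH 4/7, and re-applied on top of the security change in PATCH 7/7) reads the target from the newVersion system property and rewrites the "opensearch.version" default in build.gradle to the next -SNAPSHOT. A typical call, with an assumed next development version, would be roughly:

    # bump build.gradle to the next development iteration
    ./gradlew updateVersion -DnewVersion=2.1.1-SNAPSHOT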
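
PATCH 6/7 swaps the locally built security plugin for the published distribution zip, which build.gradle now fetches with ant.get before the security tests run. A manual download equivalent, using the URL pattern quoted in the build.gradle comment (version numbers illustrative), would be along the lines of:

    mkdir -p build/dependencies/security
    curl -L -o build/dependencies/security/opensearch-security-2.1.0.0.zip \
      https://ci.opensearch.org/ci/dbc/distribution-build-opensearch/2.1.0/latest/linux/x64/tar/builds/opensearch/plugins/opensearch-security-2.1.0.0.zip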