diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index 535dea1da3c28..7173155220a67 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -54,6 +54,9 @@ BWC_VERSION:
- "1.3.15"
- "1.3.16"
- "1.3.17"
+ - "1.3.18"
+ - "1.3.19"
+ - "1.3.20"
- "2.0.0"
- "2.0.1"
- "2.0.2"
@@ -88,3 +91,8 @@ BWC_VERSION:
- "2.13.1"
- "2.14.0"
- "2.14.1"
+ - "2.15.0"
+ - "2.15.1"
+ - "2.16.0"
+ - "2.16.1"
+ - "2.17.0"
diff --git a/.ci/java-versions.properties b/.ci/java-versions.properties
index f73122ee21a6b..e290bda773f68 100644
--- a/.ci/java-versions.properties
+++ b/.ci/java-versions.properties
@@ -13,7 +13,8 @@
# build and test OpenSearch for this branch. Valid Java versions
# are 'java' or 'openjdk' followed by the major release number.
-OPENSEARCH_BUILD_JAVA=openjdk11
+# Please see https://docs.gradle.org/8.10/userguide/upgrading_version_8.html#minimum_daemon_jvm_version
+OPENSEARCH_BUILD_JAVA=openjdk17
OPENSEARCH_RUNTIME_JAVA=java11
GRADLE_TASK=build
GRADLE_EXTRA_ARGS=
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index b5b0a815b02b2..18a310862dfbb 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -11,17 +11,27 @@
# 3. Use the command palette to run the CODEOWNERS: Show owners of current file command, which will display all code owners for the current file.
# Default ownership for all repo files
-* @anasalkouz @andrross @Bukhtawar @CEHENKLE @dblock @dbwiddis @dreamer-89 @gbbafna @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @tlfeng @VachaShah
+* @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jainankitk @kotwanikunal @linuxpi @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah
+/modules/lang-painless/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah
+/modules/parent-join/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah
/modules/transport-netty4/ @peternied
/plugins/identity-shiro/ @peternied
+/server/src/internalClusterTest/java/org/opensearch/index/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah
+/server/src/internalClusterTest/java/org/opensearch/search/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah
+
/server/src/main/java/org/opensearch/extensions/ @peternied
/server/src/main/java/org/opensearch/identity/ @peternied
-/server/src/main/java/org/opensearch/threadpool/ @peternied
+/server/src/main/java/org/opensearch/index/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah
+/server/src/main/java/org/opensearch/search/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah
+/server/src/main/java/org/opensearch/threadpool/ @jed326 @peternied
/server/src/main/java/org/opensearch/transport/ @peternied
-/.github/ @peternied
+/server/src/test/java/org/opensearch/index/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah
+/server/src/test/java/org/opensearch/search/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah
+
+/.github/ @jed326 @peternied
-/MAINTAINERS.md @anasalkouz @andrross @Bukhtawar @CEHENKLE @dblock @dbwiddis @dreamer-89 @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @peternied @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @tlfeng @VachaShah
+/MAINTAINERS.md @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gaobinlong @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @peternied @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah
diff --git a/.github/ISSUE_TEMPLATE/failed_check.md b/.github/ISSUE_TEMPLATE/failed_check.md
deleted file mode 100644
index 71508c9f5bd43..0000000000000
--- a/.github/ISSUE_TEMPLATE/failed_check.md
+++ /dev/null
@@ -1,8 +0,0 @@
----
-title: '[AUTOCUT] Gradle Check Failure on push to {{ env.branch_name }}'
-labels: '>test-failure, bug, autocut'
----
-
-Gradle check has failed on push of your commit {{ env.pr_from_sha }}.
-Please examine the workflow log {{ env.workflow_url }}.
-Is the failure [a flaky test](https://github.com/opensearch-project/OpenSearch/blob/main/DEVELOPER_GUIDE.md#flaky-tests) unrelated to your change?
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 7a4119a763b09..5476637b84e92 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -1,18 +1,21 @@
+
+
### Description
[Describe what this change achieves]
-### Issues Resolved
-[List any issues this PR will resolve]
+### Related Issues
+Resolves #[Issue number to be closed when this PR is merged]
+
### Check List
-- [ ] New functionality includes testing.
- - [ ] All tests pass
-- [ ] New functionality has been documented.
- - [ ] New functionality has javadoc added
-- [ ] Failing checks are inspected and point to the corresponding known issue(s) (See: [Troubleshooting Failing Builds](../blob/main/CONTRIBUTING.md#troubleshooting-failing-builds))
-- [ ] Commits are signed per the DCO using --signoff
-- [ ] Commit changes are listed out in CHANGELOG.md file (See: [Changelog](../blob/main/CONTRIBUTING.md#changelog))
-- [ ] Public documentation issue/PR [created](https://github.com/opensearch-project/documentation-website/issues/new/choose)
+- [ ] Functionality includes testing.
+- [ ] API changes companion pull request [created](https://github.com/opensearch-project/opensearch-api-specification/blob/main/DEVELOPER_GUIDE.md), if applicable.
+- [ ] Public documentation issue/PR [created](https://github.com/opensearch-project/documentation-website/issues/new/choose), if applicable.
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
For more information on following Developer Certificate of Origin and signing off your commits, please check [here](https://github.com/opensearch-project/OpenSearch/blob/main/CONTRIBUTING.md#developer-certificate-of-origin).
diff --git a/.github/workflows/assemble.yml b/.github/workflows/assemble.yml
index d18170e9ea6b7..294627622a136 100644
--- a/.github/workflows/assemble.yml
+++ b/.github/workflows/assemble.yml
@@ -16,9 +16,33 @@ jobs:
with:
java-version: ${{ matrix.java }}
distribution: temurin
+ - name: Set up JDK 17
+ # Please see https://docs.gradle.org/8.10/userguide/upgrading_version_8.html#minimum_daemon_jvm_version
+ if: matrix.java == 11
+ uses: actions/setup-java@v4
+ with:
+ java-version: 17
+ distribution: temurin
+ - name: Set JAVA${{ matrix.java }}_HOME
+ shell: bash
+ run: |
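+ # actions/setup-java exports JAVA_HOME_<major>_<arch>; re-export it as JAVA<major>_HOME so the build can locate the runtime JDK (see -Druntime.java below)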
+ echo "JAVA${{ matrix.java }}_HOME=$JAVA_HOME_${{ matrix.java }}_${{ runner.arch }}" >> $GITHUB_ENV
- name: Setup docker (missing on MacOS)
+ id: setup_docker
if: runner.os == 'macos'
- uses: douglascamata/setup-docker-macos-action@main
+ run: |
+ exit 0;
+ - name: Run Gradle (assemble)
+ if: runner.os == 'macos' && steps.setup_docker.outcome != 'success'
+ run: |
+ # Report success even if previous step failed (Docker on MacOS runner is very unstable)
+ exit 0;
+ - name: Run Gradle (assemble)
+ shell: bash
+ if: runner.os != 'macos'
+ run: |
+ ./gradlew assemble --parallel --no-build-cache -PDISABLE_BUILD_CACHE -Druntime.java=${{ matrix.java }}
- name: Run Gradle (assemble)
+ if: runner.os == 'macos' && steps.setup_docker.outcome == 'success'
run: |
- ./gradlew assemble --parallel --no-build-cache -PDISABLE_BUILD_CACHE
+ exit 0;
diff --git a/.github/workflows/dco.yml b/.github/workflows/dco.yml
new file mode 100644
index 0000000000000..ef842bb405d60
--- /dev/null
+++ b/.github/workflows/dco.yml
@@ -0,0 +1,19 @@
+name: Developer Certificate of Origin Check
+
+on: [pull_request]
+
+jobs:
+ dco-check:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Get PR Commits
+ id: 'get-pr-commits'
+ uses: tim-actions/get-pr-commits@v1.3.1
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ - name: DCO Check
+ uses: tim-actions/dco@v1.1.0
+ with:
+ commits: ${{ steps.get-pr-commits.outputs.commits }}
+
diff --git a/.github/workflows/dependabot_pr.yml b/.github/workflows/dependabot_pr.yml
index e265a5933b845..bf0c0127805b1 100644
--- a/.github/workflows/dependabot_pr.yml
+++ b/.github/workflows/dependabot_pr.yml
@@ -22,6 +22,13 @@ jobs:
with:
token: ${{ steps.github_app_token.outputs.token }}
+ # Please see https://docs.gradle.org/8.10/userguide/upgrading_version_8.html#minimum_daemon_jvm_version
+ - name: Set up JDK 17
+ uses: actions/setup-java@v4
+ with:
+ java-version: 17
+ distribution: temurin
+
- name: Update Gradle SHAs
run: |
./gradlew updateSHAs
diff --git a/.github/workflows/gradle-check.yml b/.github/workflows/gradle-check.yml
index f610ff5c31049..1b9b30625eb83 100644
--- a/.github/workflows/gradle-check.yml
+++ b/.github/workflows/gradle-check.yml
@@ -12,13 +12,28 @@ permissions:
contents: read # to fetch code (actions/checkout)
jobs:
+ check-files:
+ runs-on: ubuntu-latest
+ outputs:
+ RUN_GRADLE_CHECK: ${{ steps.changed-files-specific.outputs.any_changed }}
+ steps:
+ - uses: actions/checkout@v4
+ - name: Get changed files
+ id: changed-files-specific
+ uses: tj-actions/changed-files@v45
+ with:
+ files_ignore: |
+ release-notes/*.md
+ .github/**
+ *.md
+
gradle-check:
- if: github.repository == 'opensearch-project/OpenSearch'
+ needs: check-files
+ if: github.repository == 'opensearch-project/OpenSearch' && needs.check-files.outputs.RUN_GRADLE_CHECK == 'true'
permissions:
contents: read # to fetch code (actions/checkout)
pull-requests: write # to create or update comment (peter-evans/create-or-update-comment)
issues: write # To create an issue if check fails on push.
-
runs-on: ubuntu-latest
timeout-minutes: 130
steps:
@@ -98,6 +113,7 @@ jobs:
if: success()
uses: codecov/codecov-action@v4
with:
+ token: ${{ secrets.CODECOV_TOKEN }}
files: ./codeCoverage.xml
- name: Create Comment Success
@@ -143,11 +159,11 @@ jobs:
Please examine the workflow log, locate, and copy-paste the failure(s) below, then iterate to green. Is the failure [a flaky test](https://github.com/opensearch-project/OpenSearch/blob/main/DEVELOPER_GUIDE.md#flaky-tests) unrelated to your change?
- - name: Create Issue On Push Failure
- if: ${{ github.event_name == 'push' && failure() }}
- uses: dblock/create-a-github-issue@v3
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- assignees: ${{ github.event.head_commit.author.username }}, ${{ github.triggering_actor }}
- filename: .github/ISSUE_TEMPLATE/failed_check.md
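+ # Summary job for branch protection: always runs and fails only when gradle-check ran and failed, so PRs where the check was skipped (e.g. docs-only changes) still report a passing required status.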
+ check-result:
+ needs: [check-files, gradle-check]
+ if: always()
+ runs-on: ubuntu-latest
+ steps:
+ - name: Fail if gradle-check fails
+ if: ${{ needs.check-files.outputs.RUN_GRADLE_CHECK == 'true' && needs.gradle-check.result == 'failure' }}
+ run: exit 1
diff --git a/.github/workflows/precommit.yml b/.github/workflows/precommit.yml
index 95ca49ac9cb43..7c65df1f677a5 100644
--- a/.github/workflows/precommit.yml
+++ b/.github/workflows/precommit.yml
@@ -8,7 +8,7 @@ jobs:
strategy:
matrix:
java: [ 11, 17, 21 ]
- os: [ubuntu-latest, windows-latest, macos-13]
+ os: [ubuntu-latest, windows-latest, macos-latest, macos-13]
steps:
- uses: actions/checkout@v4
- name: Set up JDK ${{ matrix.java }}
@@ -17,6 +17,18 @@ jobs:
java-version: ${{ matrix.java }}
distribution: temurin
cache: gradle
+ - name: Set up JDK 17
+ # Please see https://docs.gradle.org/8.10/userguide/upgrading_version_8.html#minimum_daemon_jvm_version
+ if: matrix.java == 11
+ uses: actions/setup-java@v4
+ with:
+ java-version: 17
+ distribution: temurin
+ - name: Set JAVA${{ matrix.java }}_HOME
+ shell: bash
+ run: |
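+ # actions/setup-java exports JAVA_HOME_<major>_<arch>; re-export it as JAVA<major>_HOME so the build can locate the runtime JDK (see -Druntime.java below)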
+ echo "JAVA${{ matrix.java }}_HOME=$JAVA_HOME_${{ matrix.java }}_${{ runner.arch }}" >> $GITHUB_ENV
- name: Run Gradle (precommit)
+ shell: bash
run: |
- ./gradlew javadoc precommit --parallel
+ ./gradlew javadoc precommit --parallel -Druntime.java=${{ matrix.java }}
diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml
index fd91bef553d1c..eb80b5a1c6ff1 100644
--- a/.github/workflows/version.yml
+++ b/.github/workflows/version.yml
@@ -127,3 +127,32 @@ jobs:
title: '[AUTO] [main] Add bwc version ${{ env.NEXT_VERSION }}.'
body: |
I've noticed that a new tag ${{ env.TAG }} was pushed, and added a bwc version ${{ env.NEXT_VERSION }}.
+
+ - name: Create tracking issue
+ id: create-issue
+ uses: actions/github-script@v7
+ with:
+ script: |
+ const body = `
+ ### Description
+ A new version of OpenSearch was released. To prepare for the next release, new version numbers need to be updated in all active branches of development.
+
+ ### Exit Criteria
+ Review and merge the following pull requests:
+ - [ ] ${{ steps.base_pr.outputs.pull-request-url }}
+ - [ ] ${{ steps.base_x_pr.outputs.pull-request-url }}
+ - [ ] ${{ steps.main_pr.outputs.pull-request-url }}
+
+ ### Additional Context
+ See project wide guidance on branching and versions [[link]](https://github.com/opensearch-project/.github/blob/main/RELEASING.md).
+ `
+ const { data: issue } = await github.rest.issues.create({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ labels: ["Build"],
+ title: "Increment version for ${{ env.NEXT_VERSION }}",
+ body: body
+ });
+ console.error(JSON.stringify(issue));
+ return issue.number;
+ result-encoding: string
diff --git a/.whitesource b/.whitesource
new file mode 100644
index 0000000000000..bb071b4a2b1ce
--- /dev/null
+++ b/.whitesource
@@ -0,0 +1,45 @@
+{
+ "scanSettings": {
+ "configMode": "AUTO",
+ "configExternalURL": "",
+ "projectToken": "",
+ "baseBranches": []
+ },
+ "scanSettingsSAST": {
+ "enableScan": false,
+ "scanPullRequests": false,
+ "incrementalScan": true,
+ "baseBranches": [],
+ "snippetSize": 10
+ },
+ "checkRunSettings": {
+ "vulnerableCheckRunConclusionLevel": "failure",
+ "displayMode": "diff",
+ "useMendCheckNames": true
+ },
+ "checkRunSettingsSAST": {
+ "checkRunConclusionLevel": "failure",
+ "severityThreshold": "high"
+ },
+ "issueSettings": {
+ "minSeverityLevel": "LOW",
+ "issueType": "DEPENDENCY"
+ },
+ "issueSettingsSAST": {
+ "minSeverityLevel": "high",
+ "issueType": "repo"
+ },
+ "remediateSettings": {
+ "workflowRules": {
+ "enabled": true
+ }
+ },
+ "imageSettings":{
+ "imageTracing":{
+ "enableImageTracingPR": false,
+ "addRepositoryCoordinate": false,
+ "addDockerfilePath": false,
+ "addMendIdentifier": false
+ }
+ }
+}
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index c8c77bd846640..886c082053fd6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,42 +5,106 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
## [Unreleased 2.x]
### Added
-- Add support for Azure Managed Identity in repository-azure ([#12423](https://github.com/opensearch-project/OpenSearch/issues/12423))
-- Add useCompoundFile index setting ([#13478](https://github.com/opensearch-project/OpenSearch/pull/13478))
-- Make outbound side of transport protocol dependent ([#13293](https://github.com/opensearch-project/OpenSearch/pull/13293))
-- [Remote Store] Upload translog checkpoint as object metadata to translog.tlog([#13637](https://github.com/opensearch-project/OpenSearch/pull/13637))
-- [Remote Store] Add dynamic cluster settings to set timeout for segments upload to Remote Store ([#13679](https://github.com/opensearch-project/OpenSearch/pull/13679))
+- [Workload Management] Add Settings for Workload Management feature ([#15028](https://github.com/opensearch-project/OpenSearch/pull/15028))
+- Fix for hasInitiatedFetching to fix allocation explain and manual reroute APIs ([#14972](https://github.com/opensearch-project/OpenSearch/pull/14972))
+- [Workload Management] Add queryGroupId to Task ([#14708](https://github.com/opensearch-project/OpenSearch/pull/14708))
+- Add setting to ignore throttling nodes for allocation of unassigned primaries in remote restore ([#14991](https://github.com/opensearch-project/OpenSearch/pull/14991))
+- [Workload Management] Add Delete QueryGroup API Logic ([#14735](https://github.com/opensearch-project/OpenSearch/pull/14735))
+- [Streaming Indexing] Enhance RestClient with a new streaming API support ([#14437](https://github.com/opensearch-project/OpenSearch/pull/14437))
+- Add basic aggregation support for derived fields ([#14618](https://github.com/opensearch-project/OpenSearch/pull/14618))
+- [Workload Management] Add Create QueryGroup API Logic ([#14680](https://github.com/opensearch-project/OpenSearch/pull/14680))
+- Add ThreadContextPermission for markAsSystemContext and allow core to perform the method ([#15016](https://github.com/opensearch-project/OpenSearch/pull/15016))
+- Add ThreadContextPermission for stashAndMergeHeaders and stashWithOrigin ([#15039](https://github.com/opensearch-project/OpenSearch/pull/15039))
+- [Concurrent Segment Search] Support composite aggregations with scripting ([#15072](https://github.com/opensearch-project/OpenSearch/pull/15072))
+- Add `rangeQuery` and `regexpQuery` for `constant_keyword` field type ([#14711](https://github.com/opensearch-project/OpenSearch/pull/14711))
+- Add took time to request nodes stats ([#15054](https://github.com/opensearch-project/OpenSearch/pull/15054))
+- [Workload Management] Add Get QueryGroup API Logic ([#14709](https://github.com/opensearch-project/OpenSearch/pull/14709))
+- [Workload Management] Add Update QueryGroup API Logic ([#14775](https://github.com/opensearch-project/OpenSearch/pull/14775))
+- [Workload Management] QueryGroup resource tracking framework changes ([#13897](https://github.com/opensearch-project/OpenSearch/pull/13897))
+- Support filtering on a large list encoded by bitmap ([#14774](https://github.com/opensearch-project/OpenSearch/pull/14774))
+- Add slice execution listeners to SearchOperationListener interface ([#15153](https://github.com/opensearch-project/OpenSearch/pull/15153))
+- Make balanced shards allocator timebound ([#15239](https://github.com/opensearch-project/OpenSearch/pull/15239))
+- Add allowlist setting for ingest-geoip and ingest-useragent ([#15325](https://github.com/opensearch-project/OpenSearch/pull/15325))
+- Adding access to noSubMatches and noOverlappingMatches in Hyphenation ([#13895](https://github.com/opensearch-project/OpenSearch/pull/13895))
+- Star tree mapping changes ([#14605](https://github.com/opensearch-project/OpenSearch/pull/14605))
+- Add support for index level max slice count setting for concurrent segment search ([#15336](https://github.com/opensearch-project/OpenSearch/pull/15336))
+- Support cancellation for cat shards and node stats API ([#13966](https://github.com/opensearch-project/OpenSearch/pull/13966))
+- [Streaming Indexing] Introduce bulk HTTP API streaming flavor ([#15381](https://github.com/opensearch-project/OpenSearch/pull/15381))
+- Add support for centralize snapshot creation with pinned timestamp ([#15124](https://github.com/opensearch-project/OpenSearch/pull/15124))
+- Add concurrent search support for Derived Fields ([#15326](https://github.com/opensearch-project/OpenSearch/pull/15326))
+- [Workload Management] Add query group stats constructs ([#15343](https://github.com/opensearch-project/OpenSearch/pull/15343))
+- Add limit on number of processors for Ingest pipeline ([#15460](https://github.com/opensearch-project/OpenSearch/pull/15465))
+- Add runAs to Subject interface and introduce IdentityAwarePlugin extension point ([#14630](https://github.com/opensearch-project/OpenSearch/pull/14630))
+- [Workload Management] Add rejection logic for coordinator and shard level requests ([#15428](https://github.com/opensearch-project/OpenSearch/pull/15428))
+- Adding translog durability validation in index templates ([#15494](https://github.com/opensearch-project/OpenSearch/pull/15494))
+- [Range Queries] Add new approximateable query framework to short-circuit range queries ([#13788](https://github.com/opensearch-project/OpenSearch/pull/13788))
+- [Workload Management] Add query group level failure tracking ([#15227](https://github.com/opensearch-project/OpenSearch/pull/15527))
+- [Reader Writer Separation] Add experimental search replica shard type to achieve reader writer separation ([#15237](https://github.com/opensearch-project/OpenSearch/pull/15237))
+- Add index creation using the context field ([#15290](https://github.com/opensearch-project/OpenSearch/pull/15290))
+- [Remote Publication] Add remote download stats ([#15291](https://github.com/opensearch-project/OpenSearch/pull/15291))
+- Add support to upload snapshot shard blobs with hashed prefix ([#15426](https://github.com/opensearch-project/OpenSearch/pull/15426))
+- Add prefix support to hashed prefix & infix path types on remote store ([#15557](https://github.com/opensearch-project/OpenSearch/pull/15557))
+- Add canRemain method to TargetPoolAllocationDecider to move shards from local to remote pool for hot to warm tiering ([#15010](https://github.com/opensearch-project/OpenSearch/pull/15010))
+- Add support for pluggable deciders for concurrent search ([#15363](https://github.com/opensearch-project/OpenSearch/pull/15363))
+- Add support for comma-separated list of index names to be used with Snapshot Status API ([#15409](https://github.com/opensearch-project/OpenSearch/pull/15409))
+- Optimise snapshot deletion to speed up snapshot deletion and creation ([#15568](https://github.com/opensearch-project/OpenSearch/pull/15568))
+- [Remote Publication] Added checksum validation for cluster state behind a cluster setting ([#15218](https://github.com/opensearch-project/OpenSearch/pull/15218))
+- Relax the join validation for Remote State publication ([#15471](https://github.com/opensearch-project/OpenSearch/pull/15471))
+- Optimize NodeIndicesStats output behind flag ([#14454](https://github.com/opensearch-project/OpenSearch/pull/14454))
### Dependencies
-- Bump `com.github.spullara.mustache.java:compiler` from 0.9.10 to 0.9.13 ([#13329](https://github.com/opensearch-project/OpenSearch/pull/13329), [#13559](https://github.com/opensearch-project/OpenSearch/pull/13559))
-- Bump `org.apache.commons:commons-text` from 1.11.0 to 1.12.0 ([#13557](https://github.com/opensearch-project/OpenSearch/pull/13557))
-- Bump `org.hdrhistogram:HdrHistogram` from 2.1.12 to 2.2.1 ([#13556](https://github.com/opensearch-project/OpenSearch/pull/13556))
-- Bump `com.gradle.enterprise` from 3.17.2 to 3.17.4 ([#13641](https://github.com/opensearch-project/OpenSearch/pull/13641), [#13753](https://github.com/opensearch-project/OpenSearch/pull/13753))
-- Bump `org.apache.hadoop:hadoop-minicluster` from 3.3.6 to 3.4.0 ([#13642](https://github.com/opensearch-project/OpenSearch/pull/13642))
-- Bump `mockito` from 5.11.0 to 5.12.0 ([#13665](https://github.com/opensearch-project/OpenSearch/pull/13665))
-- Bump `com.google.code.gson:gson` from 2.10.1 to 2.11.0 ([#13752](https://github.com/opensearch-project/OpenSearch/pull/13752))
-- Bump `ch.qos.logback:logback-core` from 1.5.3 to 1.5.6 ([#13756](https://github.com/opensearch-project/OpenSearch/pull/13756))
-- Bump `netty` from 4.1.109.Final to 4.1.110.Final ([#13802](https://github.com/opensearch-project/OpenSearch/pull/13802))
-- Bump `jackson` from 2.17.0 to 2.17.1 ([#13817](https://github.com/opensearch-project/OpenSearch/pull/13817))
-- Bump `reactor` from 3.5.15 to 3.5.17 ([#13825](https://github.com/opensearch-project/OpenSearch/pull/13825))
-- Bump `reactor-netty` from 1.1.17 to 1.1.19 ([#13825](https://github.com/opensearch-project/OpenSearch/pull/13825))
-- Bump `commons-cli:commons-cli` from 1.7.0 to 1.8.0 ([#13840](https://github.com/opensearch-project/OpenSearch/pull/13840))
+- Bump `netty` from 4.1.111.Final to 4.1.112.Final ([#15081](https://github.com/opensearch-project/OpenSearch/pull/15081))
+- Bump `org.apache.commons:commons-lang3` from 3.14.0 to 3.15.0 ([#14861](https://github.com/opensearch-project/OpenSearch/pull/14861))
+- OpenJDK Update (July 2024 Patch releases) ([#14998](https://github.com/opensearch-project/OpenSearch/pull/14998))
+- Bump `com.microsoft.azure:msal4j` from 1.16.1 to 1.17.0 ([#14995](https://github.com/opensearch-project/OpenSearch/pull/14995), [#15420](https://github.com/opensearch-project/OpenSearch/pull/15420))
+- Bump `actions/github-script` from 6 to 7 ([#14997](https://github.com/opensearch-project/OpenSearch/pull/14997))
+- Bump `org.tukaani:xz` from 1.9 to 1.10 ([#15110](https://github.com/opensearch-project/OpenSearch/pull/15110))
+- Bump `org.apache.avro:avro` from 1.11.3 to 1.12.0 in /plugins/repository-hdfs ([#15119](https://github.com/opensearch-project/OpenSearch/pull/15119))
+- Bump `org.bouncycastle:bcpg-fips` from 1.0.7.1 to 2.0.9 ([#15103](https://github.com/opensearch-project/OpenSearch/pull/15103), [#15299](https://github.com/opensearch-project/OpenSearch/pull/15299))
+- Bump `com.azure:azure-core` from 1.49.1 to 1.51.0 ([#15111](https://github.com/opensearch-project/OpenSearch/pull/15111))
+- Bump `org.xerial.snappy:snappy-java` from 1.1.10.5 to 1.1.10.6 ([#15207](https://github.com/opensearch-project/OpenSearch/pull/15207))
+- Bump `com.azure:azure-xml` from 1.0.0 to 1.1.0 ([#15206](https://github.com/opensearch-project/OpenSearch/pull/15206))
+- Bump `reactor` from 3.5.19 to 3.5.20 ([#15262](https://github.com/opensearch-project/OpenSearch/pull/15262))
+- Bump `reactor-netty` from 1.1.21 to 1.1.22 ([#15262](https://github.com/opensearch-project/OpenSearch/pull/15262))
+- Bump `org.apache.kerby:kerb-admin` from 2.0.3 to 2.1.0 ([#15301](https://github.com/opensearch-project/OpenSearch/pull/15301))
+- Bump `com.azure:azure-core-http-netty` from 1.15.1 to 1.15.3 ([#15300](https://github.com/opensearch-project/OpenSearch/pull/15300))
+- Bump `com.gradle.develocity` from 3.17.6 to 3.18 ([#15297](https://github.com/opensearch-project/OpenSearch/pull/15297))
+- Bump `commons-cli:commons-cli` from 1.8.0 to 1.9.0 ([#15298](https://github.com/opensearch-project/OpenSearch/pull/15298))
+- Bump `opentelemetry` from 1.40.0 to 1.41.0 ([#15361](https://github.com/opensearch-project/OpenSearch/pull/15361))
+- Bump `opentelemetry-semconv` from 1.26.0-alpha to 1.27.0-alpha ([#15361](https://github.com/opensearch-project/OpenSearch/pull/15361))
+- Bump `tj-actions/changed-files` from 44 to 45 ([#15422](https://github.com/opensearch-project/OpenSearch/pull/15422))
+- Bump `dnsjava:dnsjava` from 3.6.0 to 3.6.1 ([#15418](https://github.com/opensearch-project/OpenSearch/pull/15418))
+- Bump `com.netflix.nebula.ospackage-base` from 11.9.1 to 11.10.0 ([#15419](https://github.com/opensearch-project/OpenSearch/pull/15419))
+- Bump `org.roaringbitmap:RoaringBitmap` from 1.1.0 to 1.2.1 ([#15423](https://github.com/opensearch-project/OpenSearch/pull/15423))
+- Bump `icu4j` from 70.1 to 75.1 ([#15469](https://github.com/opensearch-project/OpenSearch/pull/15469))
### Changed
-- Add ability for Boolean and date field queries to run when only doc_values are enabled ([#11650](https://github.com/opensearch-project/OpenSearch/pull/11650))
-- Refactor implementations of query phase searcher, allow QueryCollectorContext to have zero collectors ([#13481](https://github.com/opensearch-project/OpenSearch/pull/13481))
-- Adds support to inject telemetry instances to plugins ([#13636](https://github.com/opensearch-project/OpenSearch/pull/13636))
+- Add lower limit for primary and replica batch allocators timeout ([#14979](https://github.com/opensearch-project/OpenSearch/pull/14979))
+- Optimize regexp-based include/exclude on aggregations when pattern matches prefixes ([#14371](https://github.com/opensearch-project/OpenSearch/pull/14371))
+- Replace and block usages of org.apache.logging.log4j.util.Strings ([#15238](https://github.com/opensearch-project/OpenSearch/pull/15238))
+- Remote publication using minimum node version for backward compatibility ([#15216](https://github.com/opensearch-project/OpenSearch/pull/15216))
+
### Deprecated
### Removed
-- Remove handling of index.mapper.dynamic in AutoCreateIndex([#13067](https://github.com/opensearch-project/OpenSearch/pull/13067))
+- Remove some unused code in the search backpressure package ([#15518](https://github.com/opensearch-project/OpenSearch/pull/15518))
### Fixed
-- Fix negative RequestStats metric issue ([#13553](https://github.com/opensearch-project/OpenSearch/pull/13553))
-- Fix get field mapping API returns 404 error in mixed cluster with multiple versions ([#13624](https://github.com/opensearch-project/OpenSearch/pull/13624))
-- Allow clearing `remote_store.compatibility_mode` setting ([#13646](https://github.com/opensearch-project/OpenSearch/pull/13646))
+- Fix constraint bug which allows more primary shards than average primary shards per index ([#14908](https://github.com/opensearch-project/OpenSearch/pull/14908))
+- Fix NPE when bulk ingest with empty pipeline ([#15033](https://github.com/opensearch-project/OpenSearch/pull/15033))
+- Fix missing value of FieldSort for unsigned_long ([#14963](https://github.com/opensearch-project/OpenSearch/pull/14963))
+- Fix delete index template failed when the index template matches a data stream but is unused ([#15080](https://github.com/opensearch-project/OpenSearch/pull/15080))
+- Fix array_index_out_of_bounds_exception when indexing documents with field name containing only dot ([#15126](https://github.com/opensearch-project/OpenSearch/pull/15126))
+- Fixed array field name omission in flat_object function for nested JSON ([#13620](https://github.com/opensearch-project/OpenSearch/pull/13620))
+- Fix incorrect parameter names in MinHash token filter configuration handling ([#15233](https://github.com/opensearch-project/OpenSearch/pull/15233))
+- Fix range aggregation optimization ignoring top level queries ([#15287](https://github.com/opensearch-project/OpenSearch/pull/15287))
+- Fix indexing error when flat_object field is explicitly null ([#15375](https://github.com/opensearch-project/OpenSearch/pull/15375))
+- Fix split response processor not included in allowlist ([#15393](https://github.com/opensearch-project/OpenSearch/pull/15393))
+- Fix unchecked cast in dynamic action map getter ([#15394](https://github.com/opensearch-project/OpenSearch/pull/15394))
+- Fix null values indexed as "null" strings in flat_object field ([#14069](https://github.com/opensearch-project/OpenSearch/pull/14069))
+- Fix terms query on wildcard field returns nothing ([#15607](https://github.com/opensearch-project/OpenSearch/pull/15607))
### Security
-[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.13...2.x
+[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.16...2.x
diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index 6855281a488ca..4a8aa9305df74 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -5,20 +5,24 @@ This document contains a list of maintainers in this repo. See [opensearch-proje
## Current Maintainers
| Maintainer | GitHub ID | Affiliation |
-| ------------------------ | ------------------------------------------------------- | ----------- |
+|--------------------------|---------------------------------------------------------|-------------|
| Anas Alkouz | [anasalkouz](https://github.com/anasalkouz) | Amazon |
| Andrew Ross | [andrross](https://github.com/andrross) | Amazon |
| Andriy Redko | [reta](https://github.com/reta) | Aiven |
+| Ankit Jain | [jainankitk](https://github.com/jainankitk) | Amazon |
+| Ashish Singh | [ashking94](https://github.com/ashking94) | Amazon |
| Bukhtawar Khan | [Bukhtawar](https://github.com/Bukhtawar) | Amazon |
| Charlotte Henkle | [CEHENKLE](https://github.com/CEHENKLE) | Amazon |
| Dan Widdis | [dbwiddis](https://github.com/dbwiddis) | Amazon |
| Daniel "dB." Doubrovkine | [dblock](https://github.com/dblock) | Amazon |
+| Gao Binlong | [gaobinlong](https://github.com/gaobinlong) | Amazon |
| Gaurav Bafna | [gbbafna](https://github.com/gbbafna) | Amazon |
| Jay Deng | [jed326](https://github.com/jed326) | Amazon |
| Kunal Kotwani | [kotwanikunal](https://github.com/kotwanikunal) | Amazon |
+| Varun Bansal | [linuxpi](https://github.com/linuxpi) | Amazon |
| Marc Handalian | [mch2](https://github.com/mch2) | Amazon |
| Michael Froh | [msfroh](https://github.com/msfroh) | Amazon |
-| Nick Knize | [nknize](https://github.com/nknize) | Amazon |
+| Nick Knize | [nknize](https://github.com/nknize) | Lucenia |
| Owais Kazi | [owaiskazi19](https://github.com/owaiskazi19) | Amazon |
| Peter Nied | [peternied](https://github.com/peternied) | Amazon |
| Rishikesh Pasham | [Rishikesh1159](https://github.com/Rishikesh1159) | Amazon |
@@ -26,18 +30,18 @@ This document contains a list of maintainers in this repo. See [opensearch-proje
| Sarat Vemulapalli | [saratvemulapalli](https://github.com/saratvemulapalli) | Amazon |
| Shweta Thareja | [shwetathareja](https://github.com/shwetathareja) | Amazon |
| Sorabh Hamirwasia | [sohami](https://github.com/sohami) | Amazon |
-| Suraj Singh | [dreamer-89](https://github.com/dreamer-89) | Amazon |
-| Tianli Feng | [tlfeng](https://github.com/tlfeng) | Amazon |
| Vacha Shah | [VachaShah](https://github.com/VachaShah) | Amazon |
## Emeritus
-| Maintainer | GitHub ID | Affiliation |
-| --------------------- | ----------------------------------------- | ----------- |
-| Megha Sai Kavikondala | [meghasaik](https://github.com/meghasaik) | Amazon |
-| Xue Zhou | [xuezhou25](https://github.com/xuezhou25) | Amazon |
-| Kartik Ganesh | [kartg](https://github.com/kartg) | Amazon |
-| Abbas Hussain | [abbashus](https://github.com/abbashus) | Meta |
-| Himanshu Setia | [setiah](https://github.com/setiah) | Amazon |
-| Ryan Bogan | [ryanbogan](https://github.com/ryanbogan) | Amazon |
-| Rabi Panda | [adnapibar](https://github.com/adnapibar) | Independent |
+| Maintainer | GitHub ID | Affiliation |
+| ---------------------- |-------------------------------------------- | ----------- |
+| Megha Sai Kavikondala | [meghasaik](https://github.com/meghasaik) | Amazon |
+| Xue Zhou | [xuezhou25](https://github.com/xuezhou25) | Amazon |
+| Kartik Ganesh | [kartg](https://github.com/kartg) | Amazon |
+| Abbas Hussain | [abbashus](https://github.com/abbashus) | Meta |
+| Himanshu Setia | [setiah](https://github.com/setiah) | Amazon |
+| Ryan Bogan | [ryanbogan](https://github.com/ryanbogan) | Amazon |
+| Rabi Panda | [adnapibar](https://github.com/adnapibar) | Independent |
+| Tianli Feng | [tlfeng](https://github.com/tlfeng) | Amazon |
+| Suraj Singh | [dreamer-89](https://github.com/dreamer-89) | Amazon |
diff --git a/TESTING.md b/TESTING.md
index 85fc889270955..b6e7354225c7e 100644
--- a/TESTING.md
+++ b/TESTING.md
@@ -17,6 +17,8 @@ OpenSearch uses [jUnit](https://junit.org/junit5/) for testing, it also uses ran
- [Miscellaneous](#miscellaneous)
- [Running verification tasks](#running-verification-tasks)
- [Testing the REST layer](#testing-the-rest-layer)
+ - [Running REST Tests Against An External Cluster](#running-rest-tests-against-an-external-cluster)
+ - [Debugging REST Tests](#debugging-rest-tests)
- [Testing packaging](#testing-packaging)
- [Testing packaging on Windows](#testing-packaging-on-windows)
- [Testing VMs are disposable](#testing-vms-are-disposable)
@@ -32,6 +34,9 @@ OpenSearch uses [jUnit](https://junit.org/junit5/) for testing, it also uses ran
- [Bad practices](#bad-practices)
- [Use randomized-testing for coverage](#use-randomized-testing-for-coverage)
- [Abuse randomization in multi-threaded tests](#abuse-randomization-in-multi-threaded-tests)
+ - [Use `Thread.sleep`](#use-threadsleep)
+ - [Expect a specific segment topology](#expect-a-specific-segment-topology)
+ - [Leave environment in an unstable state after test](#leave-environment-in-an-unstable-state-after-test)
- [Test coverage analysis](#test-coverage-analysis)
- [Building with extra plugins](#building-with-extra-plugins)
- [Environment misc](#environment-misc)
@@ -87,21 +92,23 @@ This will instruct all JVMs (including any that run cli tools such as creating t
## Test case filtering
-- `tests.class` is a class-filtering shell-like glob pattern
-- `tests.method` is a method-filtering glob pattern.
+To run a single test, you need to specify the module that the test belongs to.
+
+Example: `./gradlew server:test --tests "*.ReplicaShardBatchAllocatorTests.testNoAsyncFetchData"`
Run a single test case (variants)
- ./gradlew test -Dtests.class=org.opensearch.package.ClassName
- ./gradlew test "-Dtests.class=*.ClassName"
+ ./gradlew module:test --tests org.opensearch.package.ClassName
+ ./gradlew module:test --tests org.opensearch.package.ClassName.testName
+ ./gradlew module:test --tests "*.ClassName"
Run all tests in a package and its sub-packages
- ./gradlew test "-Dtests.class=org.opensearch.package.*"
+ ./gradlew module:test --tests "org.opensearch.package.*"
Run any test methods that contain *esi* (e.g.: .r*esi*ze.)
- ./gradlew test "-Dtests.method=*esi*"
+ ./gradlew module:test --tests "*esi*"
Run all tests that are waiting for a bugfix (disabled by default)
@@ -266,7 +273,18 @@ yamlRestTest’s and javaRestTest’s are easy to identify, since they are found
If in doubt about which command to use, simply run <gradle path>:check
-Note that the REST tests, like all the integration tests, can be run against an external cluster by specifying the `tests.cluster` property, which if present needs to contain a comma separated list of nodes to connect to (e.g. localhost:9300).
+## Running REST Tests Against An External Cluster
+
+Note that the REST tests, like all the integration tests, can be run against an external cluster by specifying the following properties: `tests.cluster`, `tests.rest.cluster`, and `tests.clustername`. For a multi-node cluster, use a comma-separated list of node addresses.
+
+For example:
+
+ ./gradlew :rest-api-spec:yamlRestTest \
+ -Dtests.cluster=localhost:9200 -Dtests.rest.cluster=localhost:9200 -Dtests.clustername=opensearch
+
+## Debugging REST Tests
+
+You can launch a local OpenSearch cluster in debug mode following [Launching and debugging from an IDE](#launching-and-debugging-from-an-ide), and run your REST tests against that following [Running REST Tests Against An External Cluster](#running-rest-tests-against-an-external-cluster).
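+
+For example (a sketch, assuming the Gradle `run` task supports the standard `--debug-jvm` flag):
+
+    ./gradlew run --debug-jvm
+
+Once the debugger is attached and the node is up, run the REST tests against `localhost:9200` as described above.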
# Testing packaging
@@ -431,7 +449,7 @@ Unit tests are the preferred way to test some functionality: most of the time th
The reason why `OpenSearchSingleNodeTestCase` exists is that all our components used to be very hard to set up in isolation, which had led us to having a number of integration tests but close to no unit tests. `OpenSearchSingleNodeTestCase` is a workaround for this issue which provides an easy way to spin up a node and get access to components that are hard to instantiate like `IndicesService`. Whenever practical, you should prefer unit tests.
-Finally, if the the functionality under test needs to be run in a cluster, there are two test classes to consider:
+Finally, if the functionality under test needs to be run in a cluster, there are two test classes to consider:
* `OpenSearchRestTestCase` will connect to an external cluster. This is a good option if the tests cases don't rely on a specific configuration of the test cluster. A test cluster is set up as part of the Gradle task running integration tests, and test cases using this class can connect to it. The configuration of the cluster is provided in the Gradle files.
* `OpenSearchIntegTestCase` will create a local cluster as part of each test case. The configuration of the cluster is controlled by the test class. This is a good option if different tests cases depend on different cluster configurations, as it would be impractical (and limit parallelization) to keep re-configuring (and re-starting) the external cluster for each test case. A good example of when this class might come in handy is for testing security features, where different cluster configurations are needed to fully test each one.
@@ -453,6 +471,27 @@ However, it should not be used for coverage. For instance if you are testing a p
Multi-threaded tests are often not reproducible due to the fact that there is no guarantee on the order in which operations occur across threads. Adding randomization to the mix usually makes things worse and should be done with care.
+### Use `Thread.sleep`
+
+`Thread.sleep()` is almost always a bad idea because it is very difficult to know that you've waited long enough. Primitives like `waitUntil` or `assertBusy`, which use `Thread.sleep` internally, are acceptable for waiting on a specific condition. However, it is almost always better to instrument your code with concurrency primitives like a `CountDownLatch`, which lets you wait for a specific condition deterministically and avoids the extra waiting inherent in the polling approach used by `assertBusy`.
+
+Example:
+- [ReplicaShardAllocatorIT](https://github.com/opensearch-project/OpenSearch/blob/7ffcd6500e0bd5956cef5c289ee66d9f99d533fc/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java#L208-L235): This test uses two latches: one to wait for a recovery to start and one to block that recovery so that it can deterministically test things that happen during a recovery.
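+
+For illustration, here is a minimal sketch (hypothetical names, not taken from the repository) of waiting on a latch instead of sleeping:
+
+```java
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+public class LatchWaitExample {
+    public static void main(String[] args) throws InterruptedException {
+        CountDownLatch recoveryStarted = new CountDownLatch(1);
+
+        Thread recovery = new Thread(() -> {
+            recoveryStarted.countDown(); // signal deterministically instead of relying on timing
+            // ... the work under test would happen here ...
+        });
+        recovery.start();
+
+        // Wait exactly until the condition holds (the timeout is only a safety net),
+        // rather than guessing a sleep duration or polling as assertBusy does.
+        if (recoveryStarted.await(30, TimeUnit.SECONDS) == false) {
+            throw new AssertionError("recovery never started");
+        }
+        recovery.join();
+    }
+}
+```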
+
+### Expect a specific segment topology
+
+By design, OpenSearch integration tests will vary how the merge policy works because in almost all scenarios you should not depend on a specific segment topology (in the real world your code will see a huge diversity of indexing workloads with OpenSearch merging things in the background all the time!). If you do in fact need to care about the segment topology (e.g. for testing statistics that might vary slightly depending on number of segments), then you must take care to ensure that segment topology is deterministic by doing things like disabling background refreshes, force merging after indexing data, etc.
+
+Example:
+- [SegmentReplicationResizeRequestIT](https://github.com/opensearch-project/OpenSearch/blob/f715ee1a485e550802accc1c2e3d8101208d4f0b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationResizeRequestIT.java#L102-L109): This test disables refreshes to prevent interfering with the segment replication behavior under test.
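+
+A minimal sketch of these techniques (assuming a method inside an `OpenSearchIntegTestCase` subclass with the usual `Settings` import; the index name is illustrative):
+
+```java
+public void testWithDeterministicSegments() throws Exception {
+    // Disable background refreshes so new segments appear only when the test asks for them.
+    client().admin().indices().prepareCreate("test")
+        .setSettings(Settings.builder().put("index.refresh_interval", "-1"))
+        .get();
+    // ... index documents here ...
+    // Force merge down to one segment so statistics do not vary with segment count.
+    client().admin().indices().prepareForceMerge("test").setMaxNumSegments(1).get();
+    client().admin().indices().prepareRefresh("test").get();
+}
+```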
+
+### Leave environment in an unstable state after test
+
+The default test case will ensure that no open file handles or running threads are left after tear down. You must ensure that all resources are cleaned up at the end of each test case, or else the cleanup may end up racing with the tear down logic in the base test class in a way that is very difficult to reproduce.
+
+Example:
+- [AwarenessAttributeDecommissionIT](https://github.com/opensearch-project/OpenSearch/blob/main/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/AwarenessAttributeDecommissionIT.java#L951): Recommissions any decommissioned nodes at the end of the test to ensure the after-test checks succeed.
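+
+A minimal sketch of the corresponding cleanup pattern (the `executor` field is hypothetical; assumes JUnit's `@After` and `assertTrue`):
+
+```java
+private ExecutorService executor;
+
+@After
+public void releaseResources() throws Exception {
+    if (executor != null) {
+        executor.shutdown();
+        // Wait for worker threads to finish so none outlive the test case.
+        assertTrue(executor.awaitTermination(10, TimeUnit.SECONDS));
+    }
+}
+```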
+
# Test coverage analysis
The code coverage report can be generated through Gradle with [JaCoCo plugin](https://docs.gradle.org/current/userguide/jacoco_plugin.html).
diff --git a/TRIAGING.md b/TRIAGING.md
new file mode 100644
index 0000000000000..90842cd8e9393
--- /dev/null
+++ b/TRIAGING.md
@@ -0,0 +1,87 @@
+
+
+The maintainers of the OpenSearch repo seek to promote an inclusive and engaged community of contributors. To facilitate this, weekly triage meetings are open to all, and attendance is encouraged for anyone who hopes to contribute, discuss an issue, or learn more about the project. There are several weekly triage meetings scoped to the following component areas: Search, Storage, Cluster Manager, and finally "Core" as a catch-all for all other issues. To learn more about contributing to the OpenSearch repo, visit the [Contributing](./CONTRIBUTING.md) documentation.
+
+### Do I need to attend for my issue to be addressed/triaged?
+
+Attendance is not required for your issue to be triaged or addressed. If an issue is not accepted, it will be updated with a comment describing next steps. All new issues are triaged weekly.
+
+You can track if your issue was triaged by watching your GitHub notifications for updates.
+
+### What happens if my issue does not get covered this time?
+
+Each meeting we seek to address all new issues. However, should we run out of time before your issue is discussed, you are always welcome to attend the next meeting or to follow up on the issue post itself.
+
+### How do I join a Triage meeting?
+
+Check the [OpenSearch Meetup Group](https://www.meetup.com/opensearch/) for the latest schedule and details for joining each meeting. Each component area has its own meetup series: [Search](https://www.meetup.com/opensearch/events/300929493/), [Storage](https://www.meetup.com/opensearch/events/299907409/), [Cluster Manager](https://www.meetup.com/opensearch/events/301082218/), and [Core](https://www.meetup.com/opensearch/events/301061009/).
+
+After joining the virtual meeting, you can enable your video / voice to join the discussion. If you do not have a webcam or microphone available, you can still join in via the text chat.
+
+If you have an issue you'd like to bring forth please prepare a link to the issue so it can be presented and viewed by everyone in the meeting.
+
+### Is there an agenda for each week?
+
+Meeting structure may vary slightly, but the general structure is as follows:
+
+1. **Initial Gathering:** Feel free to turn on your video and engage in informal conversation. Shortly, a volunteer triage [facilitator](#what-is-the-role-of-the-facilitator) will begin the meeting and share their screen.
+2. **Record Attendees:** The facilitator will request attendees to share their GitHub profile links. These links will be collected and assembled into a [tag](#how-do-triage-facilitators-tag-comments-during-the-triage-meeting) to annotate comments during the meeting.
+3. **Announcements:** Any announcements will be made at the beginning of the meeting.
+4. **Review of New Issues:** We start by reviewing all untriaged issues. Each meeting has a label-based search to find relevant issues:
+ - [Search](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aissue+is%3Aopen+label%3Auntriaged+label%3A%22Search%22%2C%22Search%3ARemote+Search%22%2C%22Search%3AResiliency%22%2C%22Search%3APerformance%22%2C%22Search%3ARelevance%22%2C%22Search%3AAggregations%22%2C%22Search%3AQuery+Capabilities%22%2C%22Search%3AQuery+Insights%22%2C%22Search%3ASearchable+Snapshots%22%2C%22Search%3AUser+Behavior+Insights%22)
+ - [Storage](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aissue+is%3Aopen+label%3Auntriaged+label%3AStorage%2C%22Storage%3AResiliency%22%2C%22Storage%3APerformance%22%2C%22Storage%3ASnapshots%22%2C%22Storage%3ARemote%22%2C%22Storage%3ADurability%22)
+ - [Cluster Manager](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aissue+is%3Aopen+label%3Auntriaged+label%3A%22Cluster+Manager%22%2C%22ClusterManager%3ARemoteState%22)
+ - [Core](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aissue+is%3Aopen+label%3Auntriaged+-label%3A%22Search%22%2C%22Search%3ARemote+Search%22%2C%22Search%3AResiliency%22%2C%22Search%3APerformance%22%2C%22Search%3ARelevance%22%2C%22Search%3AAggregations%22%2C%22Search%3AQuery+Capabilities%22%2C%22Search%3AQuery+Insights%22%2C%22Search%3ASearchable+Snapshots%22%2C%22Search%3AUser+Behavior+Insights%22%2C%22Storage%22%2C%22Storage%3AResiliency%22%2C%22Storage%3APerformance%22%2C%22Storage%3ASnapshots%22%2C%22Storage%3ARemote%22%2C%22Storage%3ADurability%22%2C%22Cluster+Manager%22%2C%22ClusterManager%3ARemoteState%22)
+5. **Attendee Requests:** An opportunity for any meeting member to request consideration of an issue or pull request.
+6. **Open Discussion:** Attendees can bring up any topics not already covered by filed issues or pull requests.
+7. **Review of Old Untriaged Issues:** Time permitting, each meeting will look at all [untriaged issues older than 14 days](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aissue+is%3Aopen+label%3Auntriaged+created%3A%3C2024-05-20) to prevent issues from falling through the cracks (note the GitHub API does not allow for relative times, so the date in this search must be updated every meeting).
+
+### What is the role of the facilitator?
+
+The facilitator is crucial in driving the meeting, ensuring a smooth flow of issues into OpenSearch for future contributions. They maintain the meeting's agenda, solicit input from attendees, and record outcomes using the triage tag as items are discussed.
+
+### Do I need to have already contributed to the project to attend a triage meeting?
+
+No prior contributions are required. All interested individuals are welcome and encouraged to attend. Triage meetings offer a fantastic opportunity for new contributors to understand the project and explore various contribution avenues.
+
+### What if I have an issue that is almost a duplicate, should I open a new one to be triaged?
+
+You can always open an [issue](https://github.com/opensearch-project/OpenSearch/issues/new/choose) including one that you think may be a duplicate. If you believe your issue is similar but distinct from an existing one, you are encouraged to file it and explain the differences during the triage meeting.
+
+### What if I have follow-up questions on an issue?
+
+If you have an existing issue you would like to discuss, you can always comment on the issue itself. Alternatively, you are welcome to come to the triage meeting to discuss.
+
+### Is this meeting a good place to get help setting up features on my OpenSearch instance?
+
+While we are always happy to help the community, the best resource for implementation questions is [the OpenSearch forum](https://forum.opensearch.org/).
+
+There you can find answers to many common questions as well as speak with implementation experts.
+
+### What are the issue labels associated with triaging?
+
+There are several labels used to identify the 'state' of issues filed in OpenSearch.
+| Label | When Applied | Meaning |
+|---------------|----------------------|-----------------------------------------------------------------------------------------------------------------------------------------|
+| `Untriaged` | When issues are created or re-opened. | Issues labeled as 'Untriaged' require the attention of the repository maintainers and may need to be prioritized for quicker resolution. It's crucial to keep the count of 'Untriaged' labels low to ensure all potential security issues are addressed in a timely manner. See [SECURITY.md](https://github.com/opensearch-project/OpenSearch/blob/main/SECURITY.md) for more details on handling these issues. |
+| `Help Wanted` | Anytime. | Issues marked as 'Help Wanted' signal that they are actionable and not the current focus of the project maintainers. Community contributions are especially encouraged for these issues. |
+| `Good First Issue` | Anytime. | Issues labeled as 'Good First Issue' are small in scope and can be resolved with a single pull request. These are recommended starting points for newcomers looking to make their first contributions. |
+
+### What are the typical outcomes of a triaged issue?
+
+| Outcome | Label | Description | Canned Response |
+|--------------|------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| Accepted | `-untriaged` | The issue has the details needed to be directed towards area owners. | "Thanks for filing this issue, please feel free to submit a pull request." |
+| Rejected | N/A | The issue will be closed with a reason for why it was rejected. Reasons might include lack of details, or being outside the scope of the project. | "Thanks for creating this issue; however, it isn't being accepted due to {REASON}. Please feel free to open a new issue after addressing the reason." |
+| Area Triage | `+{AREALABEL}` | OpenSearch has many different areas. If it's unclear whether an issue should be accepted, it will be labeled with the area and an owner will be @mentioned for follow-up. | "Thanks for creating this issue; the triage meeting was unsure if this issue should be accepted, @{PERSON} or someone from the area please review and then accept or reject this issue?" |
+| Transfer | N/A | If the issue applies to another repository within the OpenSearch Project, it will be transferred accordingly. | "@opensearch-project/triage, can you please transfer this issue to project {REPOSITORY}." Or, if someone at the meeting has permissions, they can start the transfer. |
+
+### Is this where I should bring up potential security vulnerabilities?
+
+Due to the sensitive nature of security vulnerabilities, please report all potential vulnerabilities directly by following the steps outlined on the [SECURITY.md](https://github.com/opensearch-project/OpenSearch/blob/main/SECURITY.md) document.
+
+### How do triage facilitators tag comments during the triage meeting?
+
+During the triage meeting, facilitators should use the tag _[Triage - attendees [1](#Profile_link) [2](#Profile_link)]_ to indicate a collective decision. This ensures contributors know the decision came from the meeting rather than an individual and identifies participants for any follow-up queries.
+
+This tag should not be used outside triage meetings.
diff --git a/benchmarks/src/main/java/org/opensearch/benchmark/routing/allocation/RerouteBenchmark.java b/benchmarks/src/main/java/org/opensearch/benchmark/routing/allocation/RerouteBenchmark.java
new file mode 100644
index 0000000000000..e54bca579423b
--- /dev/null
+++ b/benchmarks/src/main/java/org/opensearch/benchmark/routing/allocation/RerouteBenchmark.java
@@ -0,0 +1,135 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.benchmark.routing.allocation;
+
+import org.opensearch.Version;
+import org.opensearch.cluster.ClusterName;
+import org.opensearch.cluster.ClusterState;
+import org.opensearch.cluster.metadata.IndexMetadata;
+import org.opensearch.cluster.metadata.Metadata;
+import org.opensearch.cluster.node.DiscoveryNodes;
+import org.opensearch.cluster.routing.RoutingTable;
+import org.opensearch.cluster.routing.ShardRouting;
+import org.opensearch.cluster.routing.allocation.AllocationService;
+import org.opensearch.common.logging.LogConfigurator;
+import org.opensearch.common.settings.Settings;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import static org.opensearch.cluster.routing.ShardRoutingState.INITIALIZING;
+
+@Fork(1)
+@Warmup(iterations = 3)
+@Measurement(iterations = 3)
+@BenchmarkMode(Mode.AverageTime)
+@OutputTimeUnit(TimeUnit.MILLISECONDS)
+@State(Scope.Benchmark)
+@SuppressWarnings("unused") // invoked by benchmarking framework
+public class RerouteBenchmark {
+ @Param({
+ // indices| nodes
+ " 10000| 500|", })
+ public String indicesNodes = "1|1";
+ public int numIndices;
+ public int numNodes;
+ public int numShards = 10;
+ public int numReplicas = 1;
+
+ private AllocationService allocationService;
+ private ClusterState initialClusterState;
+
+ @Setup
+ public void setUp() throws Exception {
+ LogConfigurator.setNodeName("test");
+ final String[] params = indicesNodes.split("\\|");
+ numIndices = toInt(params[0]);
+ numNodes = toInt(params[1]);
+
+ int totalShardCount = (numReplicas + 1) * numShards * numIndices;
+ Metadata.Builder mb = Metadata.builder();
+ for (int i = 1; i <= numIndices; i++) {
+ mb.put(
+ IndexMetadata.builder("test_" + i)
+ .settings(Settings.builder().put("index.version.created", Version.CURRENT))
+ .numberOfShards(numShards)
+ .numberOfReplicas(numReplicas)
+ );
+ }
+
+ Metadata metadata = mb.build();
+ RoutingTable.Builder rb = RoutingTable.builder();
+ for (int i = 1; i <= numIndices; i++) {
+ rb.addAsNew(metadata.index("test_" + i));
+ }
+ RoutingTable routingTable = rb.build();
+ initialClusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
+ .metadata(metadata)
+ .routingTable(routingTable)
+ .nodes(setUpClusterNodes(numNodes))
+ .build();
+ }
+
+ @Benchmark
+ public ClusterState measureShardAllocationEmptyCluster() throws Exception {
+ ClusterState clusterState = initialClusterState;
+ allocationService = Allocators.createAllocationService(
+ Settings.builder()
+ .put("cluster.routing.allocation.awareness.attributes", "zone")
+ .put("cluster.routing.allocation.load_awareness.provisioned_capacity", numNodes)
+ .put("cluster.routing.allocation.load_awareness.skew_factor", "50")
+ .put("cluster.routing.allocation.node_concurrent_recoveries", "2")
+ .build()
+ );
+ clusterState = allocationService.reroute(clusterState, "reroute");
+ while (clusterState.getRoutingNodes().hasUnassignedShards()) {
+ clusterState = startInitializingShardsAndReroute(allocationService, clusterState);
+ }
+ return clusterState;
+ }
+
+ private int toInt(String v) {
+ return Integer.valueOf(v.trim());
+ }
+
+ private DiscoveryNodes.Builder setUpClusterNodes(int nodes) {
+ DiscoveryNodes.Builder nb = DiscoveryNodes.builder();
+ for (int i = 1; i <= nodes; i++) {
+ Map<String, String> attributes = new HashMap<>();
+ attributes.put("zone", "zone_" + (i % 3));
+ nb.add(Allocators.newNode("node_0_" + i, attributes));
+ }
+ return nb;
+ }
+
+ private static ClusterState startInitializingShardsAndReroute(AllocationService allocationService, ClusterState clusterState) {
+ return startShardsAndReroute(allocationService, clusterState, clusterState.routingTable().shardsWithState(INITIALIZING));
+ }
+
+ private static ClusterState startShardsAndReroute(
+ AllocationService allocationService,
+ ClusterState clusterState,
+ List<ShardRouting> initializingShards
+ ) {
+ return allocationService.reroute(allocationService.applyStartedShards(clusterState, initializingShards), "reroute after starting");
+ }
+}
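For local experimentation, here is a minimal sketch of driving this benchmark through JMH's programmatic runner. The `RerouteBenchmarkRunner` class is hypothetical and not part of this change; the benchmarks module is normally run through its JMH Gradle integration.

```java
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

public class RerouteBenchmarkRunner {
    public static void main(String[] args) throws RunnerException {
        // Match the benchmark by class name; the fork/warmup/measurement
        // settings come from the annotations on RerouteBenchmark itself.
        Options options = new OptionsBuilder().include("RerouteBenchmark").build();
        new Runner(options).run();
    }
}
```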
diff --git a/build.gradle b/build.gradle
index 80e0b0dc95d4d..a0a253fc00bd9 100644
--- a/build.gradle
+++ b/build.gradle
@@ -55,7 +55,6 @@ plugins {
id 'opensearch.docker-support'
id 'opensearch.global-build-info'
id "com.diffplug.spotless" version "6.25.0" apply false
- id "org.gradle.test-retry" version "1.5.8" apply false
id "test-report-aggregation"
id 'jacoco-report-aggregation'
}
@@ -71,6 +70,13 @@ apply from: 'gradle/run.gradle'
apply from: 'gradle/missing-javadoc.gradle'
apply from: 'gradle/code-coverage.gradle'
+// Disable unconditional publishing of build scans
+develocity {
+ buildScan {
+ publishing.onlyIf { false }
+ }
+}
+
// common maven publishing configuration
allprojects {
group = 'org.opensearch'
@@ -454,9 +460,8 @@ gradle.projectsEvaluated {
// test retry configuration
subprojects {
- apply plugin: "org.gradle.test-retry"
tasks.withType(Test).configureEach {
- retry {
+ develocity.testRetry {
if (BuildParams.isCi()) {
maxRetries = 3
maxFailures = 10
diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index b41997d7fc813..4ebcd5ea57911 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -127,7 +127,7 @@ dependencies {
testFixturesApi "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}"
testFixturesApi gradleApi()
testFixturesApi gradleTestKit()
- testImplementation 'org.wiremock:wiremock-standalone:3.3.1'
+ testImplementation 'org.wiremock:wiremock-standalone:3.6.0'
testImplementation "org.mockito:mockito-core:${props.getProperty('mockito')}"
integTestImplementation('org.spockframework:spock-core:2.3-groovy-3.0') {
exclude module: "groovy"
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/info/GlobalBuildInfoPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/info/GlobalBuildInfoPlugin.java
index 448ba8a96ef02..570ab4a9f70e1 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/info/GlobalBuildInfoPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/info/GlobalBuildInfoPlugin.java
@@ -199,7 +199,28 @@ private JavaVersion determineJavaVersion(String description, File javaHome, Java
}
private JvmInstallationMetadata getJavaInstallation(File javaHome) {
- final InstallationLocation location = new InstallationLocation(javaHome, "Java home");
+ InstallationLocation location = null;
+
+ try {
+ try {
+ // The InstallationLocation(File, String) is used by Gradle pre-8.8
+ location = (InstallationLocation) MethodHandles.publicLookup()
+ .findConstructor(InstallationLocation.class, MethodType.methodType(void.class, File.class, String.class))
+ .invokeExact(javaHome, "Java home");
+ } catch (Throwable ex) {
+ // The InstallationLocation::userDefined is used by Gradle post-8.7
+ location = (InstallationLocation) MethodHandles.publicLookup()
+ .findStatic(
+ InstallationLocation.class,
+ "userDefined",
+ MethodType.methodType(InstallationLocation.class, File.class, String.class)
+ )
+ .invokeExact(javaHome, "Java home");
+
+ }
+ } catch (Throwable ex) {
+ throw new IllegalStateException("Unable to find suitable InstallationLocation constructor / factory method", ex);
+ }
try {
try {
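The pattern above — try the old public constructor first, then fall back to the newer static factory — generalizes to any API that changed shape between Gradle versions. A minimal self-contained sketch, with a hypothetical `Widget` class standing in for `InstallationLocation`:

```java
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;

class Widget {
    private final String label;
    Widget(String label) { this.label = label; }                  // the "old" API
    static Widget of(String label) { return new Widget(label); }  // the "new" API
}

class CompatFactory {
    static Widget create(String label) {
        try {
            try {
                // Prefer the constructor; the lookup throws if it has been removed.
                return (Widget) MethodHandles.lookup()
                    .findConstructor(Widget.class, MethodType.methodType(void.class, String.class))
                    .invokeExact(label);
            } catch (Throwable ex) {
                // Fall back to the static factory method.
                return (Widget) MethodHandles.lookup()
                    .findStatic(Widget.class, "of", MethodType.methodType(Widget.class, String.class))
                    .invokeExact(label);
            }
        } catch (Throwable ex) {
            throw new IllegalStateException("No usable constructor or factory method", ex);
        }
    }
}
```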
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java
index 7ab91448252f2..a7f720855951a 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java
@@ -148,8 +148,8 @@ private void configureGeneralTaskDefaults(Project project) {
project.getTasks().withType(AbstractCopyTask.class).configureEach(t -> {
t.dependsOn(project.getTasks().withType(EmptyDirTask.class));
t.setIncludeEmptyDirs(true);
- t.setDirMode(0755);
- t.setFileMode(0644);
+ t.dirPermissions(perms -> perms.unix(0755));
+ t.filePermissions(perms -> perms.unix(0644));
});
// common config across all archives
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionBwcSetupPlugin.java
index 6892af1b17f97..0502280cb69ad 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionBwcSetupPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionBwcSetupPlugin.java
@@ -158,7 +158,17 @@ private static List<String> resolveArchiveProjects(File checkoutDir
projects.addAll(asList("deb", "rpm"));
if (bwcVersion.onOrAfter("7.0.0")) { // starting with 7.0 we bundle a jdk which means we have platform-specific archives
- projects.addAll(asList("darwin-tar", "linux-tar", "windows-zip"));
+ projects.addAll(
+ asList(
+ "darwin-tar",
+ "darwin-arm64-tar",
+ "linux-tar",
+ "linux-arm64-tar",
+ "linux-ppc64le-tar",
+ "linux-s390x-tar",
+ "windows-zip"
+ )
+ );
} else { // prior to 7.0 we published only a single zip and tar archives
projects.addAll(asList("zip", "tar"));
}
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/DependencyLicensesPrecommitPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/DependencyLicensesPrecommitPlugin.java
index d4dcde9d63087..28a344de31ddb 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/DependencyLicensesPrecommitPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/DependencyLicensesPrecommitPlugin.java
@@ -33,11 +33,14 @@
package org.opensearch.gradle.precommit;
import org.opensearch.gradle.dependencies.CompileOnlyResolvePlugin;
+import org.opensearch.gradle.util.GradleUtils;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.ProjectDependency;
+import org.gradle.api.file.FileCollection;
import org.gradle.api.plugins.JavaPlugin;
+import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.TaskProvider;
public class DependencyLicensesPrecommitPlugin extends PrecommitPlugin {
@@ -48,15 +51,16 @@ public TaskProvider<? extends Task> createTask(Project project) {
TaskProvider<DependencyLicensesTask> dependencyLicenses = project.getTasks()
.register("dependencyLicenses", DependencyLicensesTask.class);
+ final Configuration runtimeClasspath = project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME);
+ final Configuration compileOnly = project.getConfigurations()
+ .getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME);
+ final Provider<FileCollection> provider = project.provider(
+ () -> GradleUtils.getFiles(project, runtimeClasspath, dependency -> dependency instanceof ProjectDependency == false)
+ .minus(compileOnly)
+ );
+
// only require dependency licenses for non-opensearch deps
- dependencyLicenses.configure(t -> {
- Configuration runtimeClasspath = project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME);
- Configuration compileOnly = project.getConfigurations()
- .getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME);
- t.setDependencies(
- runtimeClasspath.fileCollection(dependency -> dependency instanceof ProjectDependency == false).minus(compileOnly)
- );
- });
+ dependencyLicenses.configure(t -> t.getDependencies().set(provider));
// we also create the updateShas helper task that is associated with dependencyLicenses
project.getTasks().register("updateShas", UpdateShasTask.class, t -> t.setParentTask(dependencyLicenses));
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/DependencyLicensesTask.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/DependencyLicensesTask.java
index e801681c5c386..7248e0bc14431 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/DependencyLicensesTask.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/DependencyLicensesTask.java
@@ -39,6 +39,7 @@
import org.gradle.api.file.FileCollection;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
+import org.gradle.api.provider.Property;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputDirectory;
import org.gradle.api.tasks.InputFiles;
@@ -121,7 +122,7 @@ public class DependencyLicensesTask extends DefaultTask {
/**
* A collection of jar files that should be checked.
*/
- private FileCollection dependencies;
+ private Property<FileCollection> dependenciesProvider;
/**
* The directory to find the license and sha files in.
@@ -158,12 +159,11 @@ public void mapping(Map<String, String> props) {
}
@InputFiles
- public FileCollection getDependencies() {
- return dependencies;
- }
-
- public void setDependencies(FileCollection dependencies) {
- this.dependencies = dependencies;
+ public Property<FileCollection> getDependencies() {
+ if (dependenciesProvider == null) {
+ dependenciesProvider = getProject().getObjects().property(FileCollection.class);
+ }
+ return dependenciesProvider;
}
@Optional
@@ -190,6 +190,11 @@ public void ignoreSha(String dep) {
@TaskAction
public void checkDependencies() throws IOException, NoSuchAlgorithmException {
+ if (dependenciesProvider == null) {
+ throw new GradleException("No dependencies variable defined.");
+ }
+
+ final FileCollection dependencies = dependenciesProvider.get();
if (dependencies == null) {
throw new GradleException("No dependencies variable defined.");
}
@@ -226,7 +231,7 @@ public void checkDependencies() throws IOException, NoSuchAlgorithmException {
}
}
- checkDependencies(licenses, notices, sources, shaFiles);
+ checkDependencies(dependencies, licenses, notices, sources, shaFiles);
licenses.forEach((item, exists) -> failIfAnyMissing(item, exists, "license"));
@@ -255,6 +260,7 @@ private void failIfAnyMissing(String item, Boolean exists, String type) {
}
private void checkDependencies(
+ FileCollection dependencies,
Map<String, Boolean> licenses,
Map<String, Boolean> notices,
Map<String, Boolean> sources,
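The switch from a plain `FileCollection` setter to a `Property<FileCollection>` defers resolving the dependency files until the task actually executes. Below is a minimal sketch of the same pattern with a hypothetical task, not part of this change:

```java
import java.io.File;

import org.gradle.api.DefaultTask;
import org.gradle.api.file.FileCollection;
import org.gradle.api.provider.Property;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.TaskAction;

public abstract class LazyJarCheckTask extends DefaultTask {

    // Gradle injects an implementation for abstract managed properties.
    @InputFiles
    public abstract Property<FileCollection> getJars();

    @TaskAction
    public void check() {
        // The provider is only queried here, at execution time, so the backing
        // configuration does not need to be resolved during configuration.
        for (File jar : getJars().get()) {
            getLogger().lifecycle("checking {}", jar.getName());
        }
    }
}
```

A plugin would wire it with something like `task.getJars().set(project.provider(() -> ...))`, mirroring the `dependencyLicenses.configure(t -> t.getDependencies().set(provider))` call above.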
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java
index 9e740b4e061cf..984308ff68a8c 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditTask.java
@@ -37,6 +37,7 @@
import org.opensearch.gradle.LoggedExec;
import org.opensearch.gradle.OS;
import org.opensearch.gradle.dependencies.CompileOnlyResolvePlugin;
+import org.opensearch.gradle.util.GradleUtils;
import org.gradle.api.DefaultTask;
import org.gradle.api.JavaVersion;
import org.gradle.api.artifacts.Configuration;
@@ -203,11 +204,13 @@ public Set<File> getJarsToScan() {
// or dependencies added as `files(...)`, we can't be sure if those are third party or not.
// err on the side of scanning these to make sure we don't miss anything
Spec<Dependency> reallyThirdParty = dep -> dep.getGroup() != null && dep.getGroup().startsWith("org.opensearch") == false;
- Set<File> jars = getRuntimeConfiguration().getResolvedConfiguration().getFiles(reallyThirdParty);
- Set<File> compileOnlyConfiguration = getProject().getConfigurations()
- .getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME)
- .getResolvedConfiguration()
- .getFiles(reallyThirdParty);
+
+ Set<File> jars = GradleUtils.getFiles(getProject(), getRuntimeConfiguration(), reallyThirdParty).getFiles();
+ Set<File> compileOnlyConfiguration = GradleUtils.getFiles(
+ getProject(),
+ getProject().getConfigurations().getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME),
+ reallyThirdParty
+ ).getFiles();
// don't scan provided dependencies that we already scanned, e.x. don't scan cores dependencies for every plugin
if (compileOnlyConfiguration != null) {
jars.removeAll(compileOnlyConfiguration);
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/UpdateShasTask.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/UpdateShasTask.java
index 3fe08888afb09..de479f3b560b6 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/UpdateShasTask.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/UpdateShasTask.java
@@ -66,7 +66,7 @@ public UpdateShasTask() {
public void updateShas() throws NoSuchAlgorithmException, IOException {
Set<File> shaFiles = parentTask.get().getShaFiles();
- for (File dependency : parentTask.get().getDependencies()) {
+ for (File dependency : parentTask.get().getDependencies().get()) {
String jarName = dependency.getName();
File shaFile = parentTask.get().getShaFile(jarName);
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/tar/SymbolicLinkPreservingTar.java b/buildSrc/src/main/java/org/opensearch/gradle/tar/SymbolicLinkPreservingTar.java
index e82d8ed73ced2..3352dda98ef66 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/tar/SymbolicLinkPreservingTar.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/tar/SymbolicLinkPreservingTar.java
@@ -184,7 +184,7 @@ private void visitSymbolicLink(final FileCopyDetailsInternal details) {
visitedSymbolicLinks.add(details.getFile());
final TarArchiveEntry entry = new TarArchiveEntry(details.getRelativePath().getPathString(), TarConstants.LF_SYMLINK);
entry.setModTime(getModTime(details));
- entry.setMode(UnixStat.LINK_FLAG | details.getMode());
+ entry.setMode(UnixStat.LINK_FLAG | details.getPermissions().toUnixNumeric());
try {
entry.setLinkName(Files.readSymbolicLink(details.getFile().toPath()).toString());
tar.putArchiveEntry(entry);
@@ -197,7 +197,7 @@ private void visitSymbolicLink(final FileCopyDetailsInternal details) {
private void visitDirectory(final FileCopyDetailsInternal details) {
final TarArchiveEntry entry = new TarArchiveEntry(details.getRelativePath().getPathString() + "/");
entry.setModTime(getModTime(details));
- entry.setMode(UnixStat.DIR_FLAG | details.getMode());
+ entry.setMode(UnixStat.DIR_FLAG | details.getPermissions().toUnixNumeric());
try {
tar.putArchiveEntry(entry);
tar.closeArchiveEntry();
@@ -209,7 +209,7 @@ private void visitDirectory(final FileCopyDetailsInternal details) {
private void visitFile(final FileCopyDetailsInternal details) {
final TarArchiveEntry entry = new TarArchiveEntry(details.getRelativePath().getPathString());
entry.setModTime(getModTime(details));
- entry.setMode(UnixStat.FILE_FLAG | details.getMode());
+ entry.setMode(UnixStat.FILE_FLAG | details.getPermissions().toUnixNumeric());
entry.setSize(details.getSize());
try {
tar.putArchiveEntry(entry);
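The three hunks above move from the deprecated `FileCopyDetails.getMode()` to the Gradle 8.3+ permissions API, which derives the unix mode from `getPermissions().toUnixNumeric()`. A small illustrative sketch; the `TarModes` helper is hypothetical, and `0100000` is the regular-file type flag that `UnixStat.FILE_FLAG` supplies in the code above:

```java
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.gradle.api.file.FileCopyDetails;

class TarModes {
    static TarArchiveEntry fileEntry(FileCopyDetails details) {
        TarArchiveEntry entry = new TarArchiveEntry(details.getRelativePath().getPathString());
        // OR the file-type flag with the unix permission bits; this reproduces
        // what setMode(...) receives in visitFile above.
        entry.setMode(0100000 | details.getPermissions().toUnixNumeric());
        entry.setSize(details.getSize());
        return entry;
    }
}
```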
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
index b2b3e3003e572..8d5ce9143cbac 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
@@ -77,9 +77,9 @@
import java.util.stream.Stream;
public class DistroTestPlugin implements Plugin {
- private static final String SYSTEM_JDK_VERSION = "21.0.3+9";
+ private static final String SYSTEM_JDK_VERSION = "21.0.4+7";
private static final String SYSTEM_JDK_VENDOR = "adoptium";
- private static final String GRADLE_JDK_VERSION = "21.0.3+9";
+ private static final String GRADLE_JDK_VERSION = "21.0.4+7";
private static final String GRADLE_JDK_VENDOR = "adoptium";
// all distributions used by distro tests. this is temporary until tests are per distribution
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java
index fcadf35593ce6..9396797536052 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/test/rest/RestResourcesPlugin.java
@@ -81,50 +81,52 @@ public void apply(Project project) {
// tests
Configuration testConfig = project.getConfigurations().create("restTestConfig");
project.getConfigurations().create("restTests");
+
+ if (BuildParams.isInternal()) {
+ // core
+ Dependency restTestdependency = project.getDependencies().project(new HashMap<String, String>() {
+ {
+ put("path", ":rest-api-spec");
+ put("configuration", "restTests");
+ }
+ });
+ testConfig.withDependencies(s -> s.add(restTestdependency));
+ } else {
+ Dependency dependency = project.getDependencies().create("org.opensearch:rest-api-spec:" + VersionProperties.getOpenSearch());
+ testConfig.withDependencies(s -> s.add(dependency));
+ }
+
Provider<CopyRestTestsTask> copyRestYamlTestTask = project.getTasks()
.register("copyYamlTestsTask", CopyRestTestsTask.class, task -> {
task.includeCore.set(extension.restTests.getIncludeCore());
task.coreConfig = testConfig;
task.sourceSetName = SourceSet.TEST_SOURCE_SET_NAME;
- if (BuildParams.isInternal()) {
- // core
- Dependency restTestdependency = project.getDependencies().project(new HashMap<String, String>() {
- {
- put("path", ":rest-api-spec");
- put("configuration", "restTests");
- }
- });
- project.getDependencies().add(task.coreConfig.getName(), restTestdependency);
- } else {
- Dependency dependency = project.getDependencies()
- .create("org.opensearch:rest-api-spec:" + VersionProperties.getOpenSearch());
- project.getDependencies().add(task.coreConfig.getName(), dependency);
- }
task.dependsOn(task.coreConfig);
});
// api
Configuration specConfig = project.getConfigurations().create("restSpec"); // name chosen for passivity
project.getConfigurations().create("restSpecs");
+
+ if (BuildParams.isInternal()) {
+ Dependency restSpecDependency = project.getDependencies().project(new HashMap<String, String>() {
+ {
+ put("path", ":rest-api-spec");
+ put("configuration", "restSpecs");
+ }
+ });
+ specConfig.withDependencies(s -> s.add(restSpecDependency));
+ } else {
+ Dependency dependency = project.getDependencies().create("org.opensearch:rest-api-spec:" + VersionProperties.getOpenSearch());
+ specConfig.withDependencies(s -> s.add(dependency));
+ }
+
Provider<CopyRestApiTask> copyRestYamlSpecTask = project.getTasks()
.register("copyRestApiSpecsTask", CopyRestApiTask.class, task -> {
task.includeCore.set(extension.restApi.getIncludeCore());
task.dependsOn(copyRestYamlTestTask);
task.coreConfig = specConfig;
task.sourceSetName = SourceSet.TEST_SOURCE_SET_NAME;
- if (BuildParams.isInternal()) {
- Dependency restSpecDependency = project.getDependencies().project(new HashMap<String, String>() {
- {
- put("path", ":rest-api-spec");
- put("configuration", "restSpecs");
- }
- });
- project.getDependencies().add(task.coreConfig.getName(), restSpecDependency);
- } else {
- Dependency dependency = project.getDependencies()
- .create("org.opensearch:rest-api-spec:" + VersionProperties.getOpenSearch());
- project.getDependencies().add(task.coreConfig.getName(), dependency);
- }
task.dependsOn(task.coreConfig);
});
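Registering the dependency via `Configuration.withDependencies` (instead of inside the copy task's configuration action) lets Gradle contribute it lazily, just before the configuration is resolved. A minimal sketch with a hypothetical plugin and configuration name:

```java
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.Dependency;

// Hypothetical plugin illustrating the withDependencies pattern adopted above.
public class RestSpecWiringPlugin implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        Configuration restSpec = project.getConfigurations().create("restSpecExample");
        Dependency spec = project.getDependencies().create("org.opensearch:rest-api-spec:2.18.0");
        // Added only when restSpecExample is about to be resolved.
        restSpec.withDependencies(deps -> deps.add(spec));
    }
}
```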
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java
index c9e18426966f9..e8772522b19a4 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/testfixtures/TestFixturesPlugin.java
@@ -34,6 +34,7 @@
import com.avast.gradle.dockercompose.ComposeExtension;
import com.avast.gradle.dockercompose.DockerComposePlugin;
import com.avast.gradle.dockercompose.ServiceInfo;
+import com.avast.gradle.dockercompose.tasks.ComposeBuild;
import com.avast.gradle.dockercompose.tasks.ComposeDown;
import com.avast.gradle.dockercompose.tasks.ComposePull;
import com.avast.gradle.dockercompose.tasks.ComposeUp;
@@ -200,6 +201,7 @@ public void execute(Task task) {
maybeSkipTasks(tasks, dockerSupport, getTaskClass("org.opensearch.gradle.test.RestIntegTestTask"));
maybeSkipTasks(tasks, dockerSupport, TestingConventionsTasks.class);
maybeSkipTasks(tasks, dockerSupport, getTaskClass("org.opensearch.gradle.test.AntFixture"));
+ maybeSkipTasks(tasks, dockerSupport, ComposeBuild.class);
maybeSkipTasks(tasks, dockerSupport, ComposeUp.class);
maybeSkipTasks(tasks, dockerSupport, ComposePull.class);
maybeSkipTasks(tasks, dockerSupport, ComposeDown.class);
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/util/GradleUtils.java b/buildSrc/src/main/java/org/opensearch/gradle/util/GradleUtils.java
index 031fee2d1127f..428b4a16748e1 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/util/GradleUtils.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/util/GradleUtils.java
@@ -39,12 +39,17 @@
import org.gradle.api.UnknownTaskException;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.Dependency;
+import org.gradle.api.artifacts.LenientConfiguration;
+import org.gradle.api.file.FileCollection;
+import org.gradle.api.internal.artifacts.ivyservice.ResolvedFilesCollectingVisitor;
+import org.gradle.api.internal.artifacts.ivyservice.resolveengine.artifact.SelectedArtifactSet;
import org.gradle.api.plugins.JavaBasePlugin;
import org.gradle.api.plugins.JavaPluginExtension;
import org.gradle.api.provider.Provider;
import org.gradle.api.services.BuildService;
import org.gradle.api.services.BuildServiceRegistration;
import org.gradle.api.services.BuildServiceRegistry;
+import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.SourceSet;
import org.gradle.api.tasks.SourceSetContainer;
import org.gradle.api.tasks.TaskContainer;
@@ -53,6 +58,9 @@
import org.gradle.plugins.ide.eclipse.model.EclipseModel;
import org.gradle.plugins.ide.idea.model.IdeaModel;
+import java.lang.invoke.MethodHandle;
+import java.lang.invoke.MethodHandles;
+import java.lang.invoke.MethodType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -245,4 +253,22 @@ public static String getProjectPathFromTask(String taskPath) {
int lastDelimiterIndex = taskPath.lastIndexOf(":");
return lastDelimiterIndex == 0 ? ":" : taskPath.substring(0, lastDelimiterIndex);
}
+
+ public static FileCollection getFiles(Project project, Configuration cfg, Spec<Dependency> spec) {
+ final LenientConfiguration configuration = cfg.getResolvedConfiguration().getLenientConfiguration();
+ try {
+ // Use a method handle here because ResolverResults.LegacyResolverResults.LegacyVisitedArtifactSet::select(...)
+ // is not available on pre-8.7 releases; if the lookup fails we fall back to
+ // LenientConfiguration::getFiles below.
+ final MethodHandle mh = MethodHandles.lookup()
+ .findVirtual(configuration.getClass(), "select", MethodType.methodType(SelectedArtifactSet.class, Spec.class))
+ .bindTo(configuration);
+
+ final ResolvedFilesCollectingVisitor visitor = new ResolvedFilesCollectingVisitor();
+ ((SelectedArtifactSet) mh.invoke(spec)).visitArtifacts(visitor, false);
+ return project.files(visitor.getFiles());
+ } catch (Throwable ex) {
+ return project.files(configuration.getFiles(spec));
+ }
+ }
}
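A hypothetical caller, mirroring how the precommit plugins above consume the new helper; the lambda is a `Spec<Dependency>` that filters out project-internal dependencies:

```java
import org.gradle.api.Project;
import org.gradle.api.artifacts.ProjectDependency;
import org.gradle.api.file.FileCollection;
import org.opensearch.gradle.util.GradleUtils;

class ThirdPartyJars {
    static FileCollection resolve(Project project) {
        // Falls back transparently to LenientConfiguration::getFiles on older Gradle.
        return GradleUtils.getFiles(
            project,
            project.getConfigurations().getByName("runtimeClasspath"),
            dependency -> dependency instanceof ProjectDependency == false
        );
    }
}
```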
diff --git a/buildSrc/src/main/resources/forbidden/opensearch-all-signatures.txt b/buildSrc/src/main/resources/forbidden/opensearch-all-signatures.txt
index f9f24fd1e2367..199e206450178 100644
--- a/buildSrc/src/main/resources/forbidden/opensearch-all-signatures.txt
+++ b/buildSrc/src/main/resources/forbidden/opensearch-all-signatures.txt
@@ -17,6 +17,9 @@
java.nio.file.Paths @ Use org.opensearch.common.io.PathUtils.get() instead.
java.nio.file.FileSystems#getDefault() @ use org.opensearch.common.io.PathUtils.getDefaultFileSystem() instead.
+joptsimple.internal.Strings @ use org.opensearch.core.common.Strings instead.
+org.apache.logging.log4j.util.Strings @ use org.opensearch.core.common.Strings instead.
+
java.nio.file.Files#getFileStore(java.nio.file.Path) @ Use org.opensearch.env.Environment.getFileStore() instead, impacted by JDK-8034057
java.nio.file.Files#isWritable(java.nio.file.Path) @ Use org.opensearch.env.Environment.isWritable() instead, impacted by JDK-8034057
diff --git a/buildSrc/src/main/resources/minimumCompilerVersion b/buildSrc/src/main/resources/minimumCompilerVersion
index 8351c19397f4f..98d9bcb75a685 100644
--- a/buildSrc/src/main/resources/minimumCompilerVersion
+++ b/buildSrc/src/main/resources/minimumCompilerVersion
@@ -1 +1 @@
-14
+17
diff --git a/buildSrc/src/test/java/org/opensearch/gradle/precommit/DependencyLicensesTaskTests.java b/buildSrc/src/test/java/org/opensearch/gradle/precommit/DependencyLicensesTaskTests.java
index bb216b27128e1..28513710470af 100644
--- a/buildSrc/src/test/java/org/opensearch/gradle/precommit/DependencyLicensesTaskTests.java
+++ b/buildSrc/src/test/java/org/opensearch/gradle/precommit/DependencyLicensesTaskTests.java
@@ -344,7 +344,7 @@ private TaskProvider<DependencyLicensesTask> createDependencyLicensesTask(Projec
.register("dependencyLicenses", DependencyLicensesTask.class, new Action() {
@Override
public void execute(DependencyLicensesTask dependencyLicensesTask) {
- dependencyLicensesTask.setDependencies(getDependencies(project));
+ dependencyLicensesTask.getDependencies().set(getDependencies(project));
final Map<String, String> mappings = new HashMap<>();
mappings.put("from", "groovy-.*");
diff --git a/buildSrc/src/test/java/org/opensearch/gradle/precommit/UpdateShasTaskTests.java b/buildSrc/src/test/java/org/opensearch/gradle/precommit/UpdateShasTaskTests.java
index 2deabb752017a..15d6d6cd4c31c 100644
--- a/buildSrc/src/test/java/org/opensearch/gradle/precommit/UpdateShasTaskTests.java
+++ b/buildSrc/src/test/java/org/opensearch/gradle/precommit/UpdateShasTaskTests.java
@@ -102,7 +102,7 @@ public void whenDependencyExistsButShaNotThenShouldCreateNewShaFile() throws IOE
public void whenDependencyAndWrongShaExistsThenShouldNotOverwriteShaFile() throws IOException, NoSuchAlgorithmException {
project.getDependencies().add("someCompileConfiguration", dependency);
- File groovyJar = task.getParentTask().getDependencies().getFiles().iterator().next();
+ File groovyJar = task.getParentTask().getDependencies().get().getFiles().iterator().next();
String groovyShaName = groovyJar.getName() + ".sha1";
File groovySha = createFileIn(getLicensesDir(project), groovyShaName, "content");
@@ -162,7 +162,7 @@ private TaskProvider<DependencyLicensesTask> createDependencyLicensesTask(Projec
.register("dependencyLicenses", DependencyLicensesTask.class, new Action() {
@Override
public void execute(DependencyLicensesTask dependencyLicensesTask) {
- dependencyLicensesTask.setDependencies(getDependencies(project));
+ dependencyLicensesTask.getDependencies().set(getDependencies(project));
}
});
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 96d4f2a39f66b..fce5e4a194837 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,16 +1,16 @@
-opensearch = 2.15.0
-lucene = 9.10.0
+opensearch = 2.18.0
+lucene = 9.11.1
bundled_jdk_vendor = adoptium
-bundled_jdk = 21.0.3+9
+bundled_jdk = 21.0.4+7
# optional dependencies
spatial4j = 0.7
jts = 1.15.0
-jackson = 2.17.1
-jackson_databind = 2.17.1
+jackson = 2.17.2
+jackson_databind = 2.17.2
snakeyaml = 2.1
-icu4j = 70.1
+icu4j = 75.1
supercsv = 2.4.0
log4j = 2.21.0
slf4j = 1.7.36
@@ -23,16 +23,18 @@ guava = 32.1.1-jre
protobuf = 3.22.3
jakarta_annotation = 1.3.5
google_http_client = 1.44.1
+tdigest = 3.2
+hdrhistogram = 2.2.2
# when updating the JNA version, also update the version in buildSrc/build.gradle
jna = 5.13.0
-netty = 4.1.110.Final
+netty = 4.1.112.Final
joda = 2.12.7
# project reactor
-reactor_netty = 1.1.19
-reactor = 3.5.17
+reactor_netty = 1.1.22
+reactor = 3.5.20
# client dependencies
httpclient = 4.5.14
@@ -70,5 +72,5 @@ jzlib = 1.1.3
resteasy = 6.2.4.Final
# opentelemetry dependencies
-opentelemetry = 1.36.0
-opentelemetrysemconv = 1.23.1-alpha
+opentelemetry = 1.41.0
+opentelemetrysemconv = 1.27.0-alpha
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java
index 80f878d25c859..20f0563a2d032 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java
@@ -154,6 +154,9 @@ static Request bulk(BulkRequest bulkRequest) throws IOException {
parameters.withRefreshPolicy(bulkRequest.getRefreshPolicy());
parameters.withPipeline(bulkRequest.pipeline());
parameters.withRouting(bulkRequest.routing());
+ if (bulkRequest.requireAlias() != null) {
+ parameters.withRequireAlias(bulkRequest.requireAlias());
+ }
// Bulk API only supports newline delimited JSON or Smile. Before executing
// the bulk, we need to check that all requests have the same content-type
// and this content-type is supported by the Bulk API.
@@ -232,6 +235,10 @@ static Request bulk(BulkRequest bulkRequest) throws IOException {
metadata.field("_source", updateRequest.fetchSource());
}
}
+
+ if (action.isRequireAlias()) {
+ metadata.field("require_alias", action.isRequireAlias());
+ }
metadata.endObject();
}
metadata.endObject();
@@ -533,7 +540,7 @@ static Request searchTemplate(SearchTemplateRequest searchTemplateRequest) throw
Request request;
if (searchTemplateRequest.isSimulate()) {
- request = new Request(HttpGet.METHOD_NAME, "_render/template");
+ request = new Request(HttpGet.METHOD_NAME, "/_render/template");
} else {
SearchRequest searchRequest = searchTemplateRequest.getRequest();
String endpoint = endpoint(searchRequest.indices(), "_search/template");
@@ -796,8 +803,7 @@ static Request termVectors(TermVectorsRequest tvrequest) throws IOException {
}
static Request mtermVectors(MultiTermVectorsRequest mtvrequest) throws IOException {
- String endpoint = "_mtermvectors";
- Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+ Request request = new Request(HttpGet.METHOD_NAME, "/_mtermvectors");
request.setEntity(createEntity(mtvrequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
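The converter now serializes `require_alias` at two levels: a request-level flag that becomes the `?require_alias` query parameter, and a per-action flag that becomes a `require_alias` field in the action's metadata line. A short sketch of both; the index name and source document are illustrative:

```java
import org.opensearch.action.bulk.BulkRequest;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.common.xcontent.XContentType;

class RequireAliasExamples {
    static BulkRequest requestLevel() {
        BulkRequest bulk = new BulkRequest("logs-alias");
        bulk.requireAlias(true); // serialized as ?require_alias=true on _bulk
        bulk.add(new IndexRequest().id("1").source("{\"f\":1}", XContentType.JSON));
        return bulk;
    }

    static BulkRequest actionLevel() {
        BulkRequest bulk = new BulkRequest();
        // serialized as "require_alias": true in this action's metadata line
        bulk.add(
            new IndexRequest().index("logs-alias").id("1").setRequireAlias(true).source("{\"f\":1}", XContentType.JSON)
        );
        return bulk;
    }
}
```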
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java
index 94303097c772d..d587f76e61b49 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java
@@ -2227,11 +2227,11 @@ protected final <Resp> Resp parseEntity(final HttpEntity entity, final CheckedFu
if (entity.getContentType() == null) {
throw new IllegalStateException("OpenSearch didn't return the [Content-Type] header, unable to parse response body");
}
- MediaType medaiType = MediaType.fromMediaType(entity.getContentType().getValue());
- if (medaiType == null) {
+ MediaType mediaType = MediaType.fromMediaType(entity.getContentType().getValue());
+ if (mediaType == null) {
throw new IllegalStateException("Unsupported Content-Type: " + entity.getContentType().getValue());
}
- try (XContentParser parser = medaiType.xContent().createParser(registry, DEPRECATION_HANDLER, entity.getContent())) {
+ try (XContentParser parser = mediaType.xContent().createParser(registry, DEPRECATION_HANDLER, entity.getContent())) {
return entityParser.apply(parser);
}
}
diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/indices/GetIndexResponse.java b/client/rest-high-level/src/main/java/org/opensearch/client/indices/GetIndexResponse.java
index 6ec1c312c9ba9..1ceaeab6c0064 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/indices/GetIndexResponse.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/indices/GetIndexResponse.java
@@ -34,6 +34,7 @@
import org.apache.lucene.util.CollectionUtil;
import org.opensearch.cluster.metadata.AliasMetadata;
+import org.opensearch.cluster.metadata.Context;
import org.opensearch.cluster.metadata.MappingMetadata;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.xcontent.XContentParser;
@@ -61,6 +62,7 @@ public class GetIndexResponse {
private Map<String, Settings> settings;
private Map<String, Settings> defaultSettings;
private Map<String, String> dataStreams;
+ private Map<String, Context> contexts;
private String[] indices;
GetIndexResponse(
@@ -69,7 +71,8 @@ public class GetIndexResponse {
Map<String, List<AliasMetadata>> aliases,
Map<String, Settings> settings,
Map<String, Settings> defaultSettings,
- Map<String, String> dataStreams
+ Map<String, String> dataStreams,
+ Map<String, Context> contexts
) {
this.indices = indices;
// to have deterministic order
@@ -89,6 +92,9 @@ public class GetIndexResponse {
if (dataStreams != null) {
this.dataStreams = dataStreams;
}
+ if (contexts != null) {
+ this.contexts = contexts;
+ }
}
public String[] getIndices() {
@@ -123,6 +129,10 @@ public Map<String, String> getDataStreams() {
return dataStreams;
}
+ public Map<String, Context> contexts() {
+ return contexts;
+ }
+
/**
* Returns the string value for the specified index and setting. If the includeDefaults flag was not set or set to
* false on the {@link GetIndexRequest}, this method will only return a value where the setting was explicitly set
@@ -167,6 +177,7 @@ private static IndexEntry parseIndexEntry(XContentParser parser) throws IOExcept
Settings indexSettings = null;
Settings indexDefaultSettings = null;
String dataStream = null;
+ Context context = null;
// We start at START_OBJECT since fromXContent ensures that
while (parser.nextToken() != Token.END_OBJECT) {
ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser);
@@ -185,6 +196,9 @@ private static IndexEntry parseIndexEntry(XContentParser parser) throws IOExcept
case "defaults":
indexDefaultSettings = Settings.fromXContent(parser);
break;
+ case "context":
+ context = Context.fromXContent(parser);
+ break;
default:
parser.skipChildren();
}
@@ -197,7 +211,7 @@ private static IndexEntry parseIndexEntry(XContentParser parser) throws IOExcept
parser.skipChildren();
}
}
- return new IndexEntry(indexAliases, indexMappings, indexSettings, indexDefaultSettings, dataStream);
+ return new IndexEntry(indexAliases, indexMappings, indexSettings, indexDefaultSettings, dataStream, context);
}
// This is just an internal container to make stuff easier for returning
@@ -207,19 +221,22 @@ private static class IndexEntry {
Settings indexSettings = Settings.EMPTY;
Settings indexDefaultSettings = Settings.EMPTY;
String dataStream;
+ Context context;
IndexEntry(
List<AliasMetadata> indexAliases,
MappingMetadata indexMappings,
Settings indexSettings,
Settings indexDefaultSettings,
- String dataStream
+ String dataStream,
+ Context context
) {
if (indexAliases != null) this.indexAliases = indexAliases;
if (indexMappings != null) this.indexMappings = indexMappings;
if (indexSettings != null) this.indexSettings = indexSettings;
if (indexDefaultSettings != null) this.indexDefaultSettings = indexDefaultSettings;
if (dataStream != null) this.dataStream = dataStream;
+ if (context != null) this.context = context;
}
}
@@ -229,6 +246,7 @@ public static GetIndexResponse fromXContent(XContentParser parser) throws IOExce
Map<String, Settings> settings = new HashMap<>();
Map<String, Settings> defaultSettings = new HashMap<>();
Map<String, String> dataStreams = new HashMap<>();
+ Map<String, Context> contexts = new HashMap<>();
List<String> indices = new ArrayList<>();
if (parser.currentToken() == null) {
@@ -254,12 +272,15 @@ public static GetIndexResponse fromXContent(XContentParser parser) throws IOExce
if (indexEntry.dataStream != null) {
dataStreams.put(indexName, indexEntry.dataStream);
}
+ if (indexEntry.context != null) {
+ contexts.put(indexName, indexEntry.context);
+ }
} else if (parser.currentToken() == Token.START_ARRAY) {
parser.skipChildren();
} else {
parser.nextToken();
}
}
- return new GetIndexResponse(indices.toArray(new String[0]), mappings, aliases, settings, defaultSettings, dataStreams);
+ return new GetIndexResponse(indices.toArray(new String[0]), mappings, aliases, settings, defaultSettings, dataStreams, contexts);
}
}
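Once parsed, callers can read the per-index context alongside the existing per-index maps. A small hypothetical snippet:

```java
import java.util.Map;

import org.opensearch.client.indices.GetIndexResponse;
import org.opensearch.cluster.metadata.Context;

class ContextLookup {
    static void printContexts(GetIndexResponse response) {
        Map<String, Context> contexts = response.contexts();
        if (contexts != null) { // stays null when the response carried no contexts
            contexts.forEach((index, context) -> System.out.println(index + " -> " + context));
        }
    }
}
```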
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java
index da9f790215669..f5b1b0768ff4a 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java
@@ -1299,4 +1299,61 @@ public void testMultiTermvectors() throws IOException {
}
}
}
+
+ public void testBulkWithRequireAlias() throws IOException {
+ {
+ String indexAliasName = "testindex-1";
+
+ BulkRequest bulkRequest = new BulkRequest(indexAliasName);
+ bulkRequest.requireAlias(true);
+ bulkRequest.add(new IndexRequest().id("1").source("{ \"name\": \"Biden\" }", XContentType.JSON));
+ bulkRequest.add(new IndexRequest().id("2").source("{ \"name\": \"Trump\" }", XContentType.JSON));
+
+ BulkResponse bulkResponse = execute(bulkRequest, highLevelClient()::bulk, highLevelClient()::bulkAsync, RequestOptions.DEFAULT);
+
+ assertFalse("Should not auto-create the '" + indexAliasName + "' index.", indexExists(indexAliasName));
+ assertTrue("Bulk response must have failures.", bulkResponse.hasFailures());
+ }
+ {
+ String indexAliasName = "testindex-2";
+
+ BulkRequest bulkRequest = new BulkRequest();
+ bulkRequest.requireAlias(true);
+ bulkRequest.add(new IndexRequest().index(indexAliasName).id("1").source("{ \"name\": \"Biden\" }", XContentType.JSON));
+ bulkRequest.add(new IndexRequest().index(indexAliasName).id("2").source("{ \"name\": \"Trump\" }", XContentType.JSON));
+
+ BulkResponse bulkResponse = execute(bulkRequest, highLevelClient()::bulk, highLevelClient()::bulkAsync, RequestOptions.DEFAULT);
+
+ assertFalse("Should not auto-create the '" + indexAliasName + "' index.", indexExists(indexAliasName));
+ assertTrue("Bulk response must have failures.", bulkResponse.hasFailures());
+ }
+ {
+ String indexAliasName = "testindex-3";
+
+ BulkRequest bulkRequest = new BulkRequest(indexAliasName);
+ bulkRequest.add(new IndexRequest().id("1").setRequireAlias(true).source("{ \"name\": \"Biden\" }", XContentType.JSON));
+ bulkRequest.add(new IndexRequest().id("2").setRequireAlias(true).source("{ \"name\": \"Trump\" }", XContentType.JSON));
+
+ BulkResponse bulkResponse = execute(bulkRequest, highLevelClient()::bulk, highLevelClient()::bulkAsync, RequestOptions.DEFAULT);
+
+ assertFalse("Should not auto-create the '" + indexAliasName + "' index.", indexExists(indexAliasName));
+ assertTrue("Bulk response must have failures.", bulkResponse.hasFailures());
+ }
+ {
+ String indexAliasName = "testindex-4";
+
+ BulkRequest bulkRequest = new BulkRequest();
+ bulkRequest.add(
+ new IndexRequest().index(indexAliasName).id("1").setRequireAlias(true).source("{ \"name\": \"Biden\" }", XContentType.JSON)
+ );
+ bulkRequest.add(
+ new IndexRequest().index(indexAliasName).id("2").setRequireAlias(true).source("{ \"name\": \"Trump\" }", XContentType.JSON)
+ );
+
+ BulkResponse bulkResponse = execute(bulkRequest, highLevelClient()::bulk, highLevelClient()::bulkAsync, RequestOptions.DEFAULT);
+
+ assertFalse("Should not auto-create the '" + indexAliasName + "' index.", indexExists(indexAliasName));
+ assertTrue("Bulk response must have failures.", bulkResponse.hasFailures());
+ }
+ }
}
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java
index 649e921b54cbe..d2dde8d3c2684 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java
@@ -701,7 +701,7 @@ public void testOpenExistingIndex() throws IOException {
closeIndex(index);
ResponseException exception = expectThrows(
ResponseException.class,
- () -> client().performRequest(new Request(HttpGet.METHOD_NAME, index + "/_search"))
+ () -> client().performRequest(new Request(HttpGet.METHOD_NAME, "/" + index + "/_search"))
);
assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus()));
assertThat(exception.getMessage().contains(index), equalTo(true));
@@ -714,7 +714,7 @@ public void testOpenExistingIndex() throws IOException {
);
assertTrue(openIndexResponse.isAcknowledged());
- Response response = client().performRequest(new Request(HttpGet.METHOD_NAME, index + "/_search"));
+ Response response = client().performRequest(new Request(HttpGet.METHOD_NAME, "/" + index + "/_search"));
assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
}
@@ -771,7 +771,7 @@ public void testCloseExistingIndex() throws IOException {
ResponseException exception = expectThrows(
ResponseException.class,
- () -> client().performRequest(new Request(HttpGet.METHOD_NAME, indexResult.getIndex() + "/_search"))
+ () -> client().performRequest(new Request(HttpGet.METHOD_NAME, "/" + indexResult.getIndex() + "/_search"))
);
assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus()));
assertThat(exception.getMessage().contains(indexResult.getIndex()), equalTo(true));
@@ -1270,7 +1270,7 @@ public void testGetAliasesNonExistentIndexOrAlias() throws IOException {
assertThat(getAliasesResponse.getException(), nullValue());
}
createIndex(index, Settings.EMPTY);
- client().performRequest(new Request(HttpPut.METHOD_NAME, index + "/_alias/" + alias));
+ client().performRequest(new Request(HttpPut.METHOD_NAME, "/" + index + "/_alias/" + alias));
{
GetAliasesRequest getAliasesRequest = new GetAliasesRequest().indices(index, "non_existent_index");
GetAliasesResponse getAliasesResponse = execute(
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/RankEvalIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/RankEvalIT.java
index 47add92ecaccd..01fdd489aa7d8 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/RankEvalIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/RankEvalIT.java
@@ -121,7 +121,7 @@ public void testRankEvalRequest() throws IOException {
}
// now try this when test2 is closed
- client().performRequest(new Request("POST", "index2/_close"));
+ client().performRequest(new Request("POST", "/index2/_close"));
rankEvalRequest.indicesOptions(IndicesOptions.fromParameters(null, "true", null, "false", SearchRequest.DEFAULT_INDICES_OPTIONS));
response = execute(rankEvalRequest, highLevelClient()::rankEval, highLevelClient()::rankEvalAsync);
}
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java
index 2be3c27c6b5ce..2a1fc179d13bc 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java
@@ -1399,7 +1399,7 @@ public void testRenderSearchTemplate() throws Exception {
// Verify that the resulting REST request looks as expected.
Request request = RequestConverters.searchTemplate(searchTemplateRequest);
- String endpoint = "_render/template";
+ String endpoint = "/_render/template";
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
assertEquals(endpoint, request.getEndpoint());
@@ -1565,7 +1565,7 @@ public void testMultiTermVectors() throws IOException {
Request request = RequestConverters.mtermVectors(mtvRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("_mtermvectors", request.getEndpoint());
+ assertEquals("/_mtermvectors", request.getEndpoint());
assertToXContentBody(mtvRequest, request.getEntity());
}
@@ -1585,7 +1585,7 @@ public void testMultiTermVectorsWithType() throws IOException {
Request request = RequestConverters.mtermVectors(mtvRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("_mtermvectors", request.getEndpoint());
+ assertEquals("/_mtermvectors", request.getEndpoint());
assertToXContentBody(mtvRequest, request.getEntity());
}
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java
index b962fa8ff415e..aeb052fdce138 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java
@@ -727,7 +727,7 @@ public void testSearchWithSuggest() throws IOException {
}
public void testSearchWithWeirdScriptFields() throws Exception {
- Request doc = new Request("PUT", "test/_doc/1");
+ Request doc = new Request("PUT", "/test/_doc/1");
doc.setJsonEntity("{\"field\":\"value\"}");
client().performRequest(doc);
client().performRequest(new Request("POST", "/test/_refresh"));
@@ -774,7 +774,7 @@ public void testSearchWithWeirdScriptFields() throws Exception {
public void testSearchWithDerivedFields() throws Exception {
// Just testing DerivedField definition from SearchSourceBuilder derivedField()
// We are not testing the full functionality here
- Request doc = new Request("PUT", "test/_doc/1");
+ Request doc = new Request("PUT", "/test/_doc/1");
doc.setJsonEntity("{\"field\":\"value\"}");
client().performRequest(doc);
client().performRequest(new Request("POST", "/test/_refresh"));
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java
index 93ffd7cade7c3..fef8f4ab3991e 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java
@@ -230,6 +230,7 @@ public void testSnapshotsStatus() {
Map<String, String> expectedParams = new HashMap<>();
String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0];
String[] snapshots = RequestConvertersTests.randomIndicesNames(1, 5);
+ String[] indices = RequestConvertersTests.randomIndicesNames(1, 5);
StringBuilder snapshotNames = new StringBuilder(snapshots[0]);
for (int idx = 1; idx < snapshots.length; idx++) {
snapshotNames.append(",").append(snapshots[idx]);
@@ -237,8 +238,9 @@ public void testSnapshotsStatus() {
boolean ignoreUnavailable = randomBoolean();
String endpoint = "/_snapshot/" + repository + "/" + snapshotNames.toString() + "/_status";
- SnapshotsStatusRequest snapshotsStatusRequest = new SnapshotsStatusRequest(repository, snapshots);
+ SnapshotsStatusRequest snapshotsStatusRequest = (new SnapshotsStatusRequest(repository, snapshots)).indices(indices);
RequestConvertersTests.setRandomMasterTimeout(snapshotsStatusRequest, expectedParams);
+
snapshotsStatusRequest.ignoreUnavailable(ignoreUnavailable);
expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/SearchDocumentationIT.java
index bf0f70304168e..326dde54cfb61 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/SearchDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/SearchDocumentationIT.java
@@ -998,7 +998,7 @@ public void onFailure(Exception e) {
protected void registerQueryScript(RestClient restClient) throws IOException {
// tag::register-script
- Request scriptRequest = new Request("POST", "_scripts/title_search");
+ Request scriptRequest = new Request("POST", "/_scripts/title_search");
scriptRequest.setJsonEntity(
"{" +
" \"script\": {" +
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/SnapshotClientDocumentationIT.java
index d0015db044843..6949bc382bfe8 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/SnapshotClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/SnapshotClientDocumentationIT.java
@@ -827,7 +827,7 @@ private void createTestIndex() throws IOException {
}
private void createTestSnapshots() throws IOException {
- Request createSnapshot = new Request("put", String.format(Locale.ROOT, "_snapshot/%s/%s", repositoryName, snapshotName));
+ Request createSnapshot = new Request("put", String.format(Locale.ROOT, "/_snapshot/%s/%s", repositoryName, snapshotName));
createSnapshot.addParameter("wait_for_completion", "true");
createSnapshot.setJsonEntity("{\"indices\":\"" + indexName + "\"}");
Response response = highLevelClient().getLowLevelClient().performRequest(createSnapshot);
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexResponseTests.java
index a00f0487116dc..fa313e68f8a35 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexResponseTests.java
@@ -36,6 +36,7 @@
import org.opensearch.client.AbstractResponseTestCase;
import org.opensearch.client.GetAliasesResponseTests;
import org.opensearch.cluster.metadata.AliasMetadata;
+import org.opensearch.cluster.metadata.Context;
import org.opensearch.cluster.metadata.MappingMetadata;
import org.opensearch.common.settings.IndexScopedSettings;
import org.opensearch.common.settings.Settings;
@@ -66,6 +67,7 @@ protected org.opensearch.action.admin.indices.get.GetIndexResponse createServerT
final Map<String, Settings> settings = new HashMap<>();
final Map<String, Settings> defaultSettings = new HashMap<>();
final Map<String, String> dataStreams = new HashMap<>();
+ final Map<String, Context> contexts = new HashMap<>();
IndexScopedSettings indexScopedSettings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS;
boolean includeDefaults = randomBoolean();
for (String index : indices) {
@@ -90,6 +92,10 @@ protected org.opensearch.action.admin.indices.get.GetIndexResponse createServerT
if (randomBoolean()) {
dataStreams.put(index, randomAlphaOfLength(5).toLowerCase(Locale.ROOT));
}
+
+ if (randomBoolean()) {
+ contexts.put(index, new Context(randomAlphaOfLength(5).toLowerCase(Locale.ROOT)));
+ }
}
return new org.opensearch.action.admin.indices.get.GetIndexResponse(
indices,
@@ -97,7 +103,8 @@ protected org.opensearch.action.admin.indices.get.GetIndexResponse createServerT
aliases,
settings,
defaultSettings,
- dataStreams
+ dataStreams,
+ null
);
}
@@ -116,6 +123,7 @@ protected void assertInstances(
assertEquals(serverTestInstance.getSettings(), clientInstance.getSettings());
assertEquals(serverTestInstance.defaultSettings(), clientInstance.getDefaultSettings());
assertEquals(serverTestInstance.getAliases(), clientInstance.getAliases());
+ assertEquals(serverTestInstance.contexts(), clientInstance.contexts());
}
private static MappingMetadata createMappingsForIndex() {
diff --git a/client/rest/build.gradle b/client/rest/build.gradle
index 42012ab935e72..a448e3d34dc22 100644
--- a/client/rest/build.gradle
+++ b/client/rest/build.gradle
@@ -51,6 +51,10 @@ dependencies {
api "commons-codec:commons-codec:${versions.commonscodec}"
api "commons-logging:commons-logging:${versions.commonslogging}"
+ // reactor
+ api "io.projectreactor:reactor-core:${versions.reactor}"
+ api "org.reactivestreams:reactive-streams:${versions.reactivestreams}"
+
testImplementation project(":client:test")
testImplementation "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testImplementation "junit:junit:${versions.junit}"
@@ -92,18 +96,50 @@ testingConventions {
}
}
-thirdPartyAudit.ignoreMissingClasses(
- //commons-logging optional dependencies
- 'org.apache.avalon.framework.logger.Logger',
- 'org.apache.log.Hierarchy',
- 'org.apache.log.Logger',
- 'org.apache.log4j.Level',
- 'org.apache.log4j.Logger',
- 'org.apache.log4j.Priority',
- //commons-logging provided dependencies
- 'javax.servlet.ServletContextEvent',
- 'javax.servlet.ServletContextListener'
-)
+thirdPartyAudit {
+ ignoreMissingClasses(
+ 'org.slf4j.Logger',
+ 'org.slf4j.LoggerFactory',
+ //commons-logging optional dependencies
+ 'org.apache.avalon.framework.logger.Logger',
+ 'org.apache.log.Hierarchy',
+ 'org.apache.log.Logger',
+ 'org.apache.log4j.Level',
+ 'org.apache.log4j.Logger',
+ 'org.apache.log4j.Priority',
+ //commons-logging provided dependencies
+ 'javax.servlet.ServletContextEvent',
+ 'javax.servlet.ServletContextListener',
+ 'io.micrometer.context.ContextAccessor',
+ 'io.micrometer.context.ContextRegistry',
+ 'io.micrometer.context.ContextSnapshot',
+ 'io.micrometer.context.ContextSnapshot$Scope',
+ 'io.micrometer.context.ContextSnapshotFactory',
+ 'io.micrometer.context.ContextSnapshotFactory$Builder',
+ 'io.micrometer.context.ThreadLocalAccessor',
+ 'io.micrometer.core.instrument.Clock',
+ 'io.micrometer.core.instrument.Counter',
+ 'io.micrometer.core.instrument.Counter$Builder',
+ 'io.micrometer.core.instrument.DistributionSummary',
+ 'io.micrometer.core.instrument.DistributionSummary$Builder',
+ 'io.micrometer.core.instrument.Meter',
+ 'io.micrometer.core.instrument.MeterRegistry',
+ 'io.micrometer.core.instrument.Metrics',
+ 'io.micrometer.core.instrument.Tag',
+ 'io.micrometer.core.instrument.Tags',
+ 'io.micrometer.core.instrument.Timer',
+ 'io.micrometer.core.instrument.Timer$Builder',
+ 'io.micrometer.core.instrument.Timer$Sample',
+ 'io.micrometer.core.instrument.binder.jvm.ExecutorServiceMetrics',
+ 'io.micrometer.core.instrument.composite.CompositeMeterRegistry',
+ 'io.micrometer.core.instrument.search.Search',
+ 'reactor.blockhound.BlockHound$Builder',
+ 'reactor.blockhound.integration.BlockHoundIntegration'
+ )
+ ignoreViolations(
+ 'reactor.core.publisher.Traces$SharedSecretsCallSiteSupplierFactory$TracingException'
+ )
+}
tasks.withType(JavaCompile) {
// Suppressing '[options] target value 8 is obsolete and will be removed in a future release'
diff --git a/client/rest/licenses/reactive-streams-1.0.4.jar.sha1 b/client/rest/licenses/reactive-streams-1.0.4.jar.sha1
new file mode 100644
index 0000000000000..45a80e3f7e361
--- /dev/null
+++ b/client/rest/licenses/reactive-streams-1.0.4.jar.sha1
@@ -0,0 +1 @@
+3864a1320d97d7b045f729a326e1e077661f31b7
\ No newline at end of file
diff --git a/client/rest/licenses/reactive-streams-LICENSE.txt b/client/rest/licenses/reactive-streams-LICENSE.txt
new file mode 100644
index 0000000000000..1e3c7e7c77495
--- /dev/null
+++ b/client/rest/licenses/reactive-streams-LICENSE.txt
@@ -0,0 +1,21 @@
+MIT No Attribution
+
+Copyright 2014 Reactive Streams
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-NOTICE.txt b/client/rest/licenses/reactive-streams-NOTICE.txt
similarity index 100%
rename from plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-NOTICE.txt
rename to client/rest/licenses/reactive-streams-NOTICE.txt
diff --git a/client/rest/licenses/reactor-core-3.5.20.jar.sha1 b/client/rest/licenses/reactor-core-3.5.20.jar.sha1
new file mode 100644
index 0000000000000..0c80be89f66c8
--- /dev/null
+++ b/client/rest/licenses/reactor-core-3.5.20.jar.sha1
@@ -0,0 +1 @@
+1fc0f91e2b93778a974339d2c24363d7f34f90b4
\ No newline at end of file
diff --git a/client/rest/licenses/reactor-core-LICENSE.txt b/client/rest/licenses/reactor-core-LICENSE.txt
new file mode 100644
index 0000000000000..e5583c184e67a
--- /dev/null
+++ b/client/rest/licenses/reactor-core-LICENSE.txt
@@ -0,0 +1,201 @@
+Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/client/rest/licenses/reactor-core-NOTICE.txt b/client/rest/licenses/reactor-core-NOTICE.txt
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/client/rest/src/main/java/org/opensearch/client/Cancellable.java b/client/rest/src/main/java/org/opensearch/client/Cancellable.java
index 4bfc0704227aa..6b698ad351d86 100644
--- a/client/rest/src/main/java/org/opensearch/client/Cancellable.java
+++ b/client/rest/src/main/java/org/opensearch/client/Cancellable.java
@@ -34,6 +34,8 @@
import org.apache.http.client.methods.AbstractExecutionAwareRequest;
import org.apache.http.client.methods.HttpRequestBase;
+import java.io.IOException;
+import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
/**
@@ -75,7 +77,7 @@ public synchronized void cancel() {
}
/**
- * Executes some arbitrary code iff the on-going request has not been cancelled, otherwise throws {@link CancellationException}.
+ * Executes some arbitrary code if the on-going request has not been cancelled, otherwise throws {@link CancellationException}.
* This is needed to guarantee that cancelling a request works correctly even in case {@link #cancel()} is called between different
* attempts of the same request. The low-level client reuses the same instance of the {@link AbstractExecutionAwareRequest} by calling
* {@link AbstractExecutionAwareRequest#reset()} between subsequent retries. The {@link #cancel()} method can be called at anytime,
@@ -93,6 +95,31 @@ synchronized void runIfNotCancelled(Runnable runnable) {
runnable.run();
}
+ /**
+ * Executes some arbitrary code if the on-going request has not been cancelled, otherwise throws {@link CancellationException}.
+ * This is needed to guarantee that cancelling a request works correctly even in case {@link #cancel()} is called between different
+ * attempts of the same request. The low-level client reuses the same instance of the {@link AbstractExecutionAwareRequest} by calling
+ * {@link AbstractExecutionAwareRequest#reset()} between subsequent retries. The {@link #cancel()} method can be called at any time,
+ * and we need to handle the case where it gets called while there is no request being executed as one attempt may have failed and
+ * the subsequent attempt has not been started yet.
+ * If the request has already been cancelled we don't go ahead with the next attempt, and artificially raise the
+ * {@link CancellationException}, otherwise we run the provided {@link Callable} which will reset the request and send the next attempt.
+ * Note that this method must be synchronized as well as the {@link #cancel()} method, to prevent a request from being cancelled
+ * when there is no future to cancel, which would make cancelling the request a no-op.
+ */
+ synchronized <T> T callIfNotCancelled(Callable<T> callable) throws IOException {
+ if (this.httpRequest.isAborted()) {
+ throw newCancellationException();
+ }
+ try {
+ return callable.call();
+ } catch (final IOException ex) {
+ throw ex;
+ } catch (final Exception ex) {
+ throw new IOException(ex);
+ }
+ }
+
static CancellationException newCancellationException() {
return new CancellationException("request was cancelled");
}
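For orientation, here is a minimal sketch of the retry-guard pattern callIfNotCancelled enables. The Guard class is hypothetical and stands in for Cancellable, which is package-private and checks httpRequest.isAborted() rather than a boolean flag:

    import java.io.IOException;
    import java.util.concurrent.Callable;
    import java.util.concurrent.CancellationException;

    final class Guard {
        private boolean cancelled;

        synchronized void cancel() {
            cancelled = true;
        }

        // Mirrors callIfNotCancelled: checked exceptions other than IOException
        // are wrapped so callers deal with a single exception type.
        synchronized <T> T callIfNotCancelled(Callable<T> attempt) throws IOException {
            if (cancelled) {
                throw new CancellationException("request was cancelled");
            }
            try {
                return attempt.call();
            } catch (final IOException ex) {
                throw ex;
            } catch (final Exception ex) {
                throw new IOException(ex);
            }
        }
    }

Because both cancel() and callIfNotCancelled(...) are synchronized, a cancellation that lands between two attempts surfaces as a CancellationException instead of silently starting another attempt.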
diff --git a/client/rest/src/main/java/org/opensearch/client/Message.java b/client/rest/src/main/java/org/opensearch/client/Message.java
new file mode 100644
index 0000000000000..562b95f38d4ea
--- /dev/null
+++ b/client/rest/src/main/java/org/opensearch/client/Message.java
@@ -0,0 +1,36 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.client;
+
+import org.apache.http.HttpMessage;
+
+import java.util.Objects;
+
+final class Message<H extends HttpMessage, B> {
+ private final H head;
+ private final B body;
+
+ public Message(final H head, final B body) {
+ this.head = Objects.requireNonNull(head, "Message head");
+ this.body = body;
+ }
+
+ public H getHead() {
+ return head;
+ }
+
+ public B getBody() {
+ return body;
+ }
+
+ @Override
+ public String toString() {
+ return "[" + "head=" + head + ", body=" + body + ']';
+ }
+}
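Message is a plain head/body pair: in the streaming path the head is the HttpResponse and the body is the Reactive Streams publisher of response chunks. A minimal sketch of constructing one (the class is package-private, so this assumes code living in org.opensearch.client; the payload is illustrative):

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    import org.apache.http.HttpResponse;
    import org.apache.http.HttpVersion;
    import org.apache.http.message.BasicHttpResponse;
    import org.reactivestreams.Publisher;
    import reactor.core.publisher.Flux;

    HttpResponse head = new BasicHttpResponse(HttpVersion.HTTP_1_1, 200, "OK");
    Publisher<ByteBuffer> body = Flux.just(ByteBuffer.wrap("{}".getBytes(StandardCharsets.UTF_8)));
    Message<HttpResponse, Publisher<ByteBuffer>> message = new Message<>(head, body);
    assert message.getHead().getStatusLine().getStatusCode() == 200;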
diff --git a/client/rest/src/main/java/org/opensearch/client/ReactiveDataConsumer.java b/client/rest/src/main/java/org/opensearch/client/ReactiveDataConsumer.java
new file mode 100644
index 0000000000000..e79832067bd27
--- /dev/null
+++ b/client/rest/src/main/java/org/opensearch/client/ReactiveDataConsumer.java
@@ -0,0 +1,122 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.client;
+
+import org.apache.http.util.Args;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.reactivestreams.Publisher;
+import org.reactivestreams.Subscriber;
+import org.reactivestreams.Subscription;
+
+/**
+ * Adapted from https://github.com/apache/httpcomponents-core/blob/master/httpcore5-reactive/src/main/java/org/apache/hc/core5/reactive/ReactiveDataConsumer.java
+ */
+class ReactiveDataConsumer implements Publisher<ByteBuffer> {
+
+ private final AtomicLong requests = new AtomicLong(0);
+
+ private final BlockingQueue<ByteBuffer> buffers = new LinkedBlockingQueue<>();
+ private final AtomicBoolean flushInProgress = new AtomicBoolean(false);
+ private final AtomicInteger windowScalingIncrement = new AtomicInteger(0);
+ private volatile boolean completed;
+ private volatile Exception exception;
+ private volatile Subscriber<? super ByteBuffer> subscriber;
+
+ private final ReentrantLock lock = new ReentrantLock();
+
+ public void failed(final Exception cause) {
+ if (!completed) {
+ exception = cause;
+ flushToSubscriber();
+ }
+ }
+
+ public void consume(final ByteBuffer byteBuffer) throws IOException {
+ if (completed) {
+ throw new IllegalStateException("Received data past end of stream");
+ }
+
+ final byte[] copy = new byte[byteBuffer.remaining()];
+ byteBuffer.get(copy);
+ buffers.add(ByteBuffer.wrap(copy));
+
+ flushToSubscriber();
+ }
+
+ public void complete() {
+ completed = true;
+ flushToSubscriber();
+ }
+
+ private void flushToSubscriber() {
+ lock.lock();
+ try {
+ final Subscriber<? super ByteBuffer> s = subscriber;
+ if (flushInProgress.getAndSet(true)) {
+ return;
+ }
+ try {
+ if (s == null) {
+ return;
+ }
+ if (exception != null) {
+ subscriber = null;
+ s.onError(exception);
+ return;
+ }
+ ByteBuffer next;
+ while (requests.get() > 0 && ((next = buffers.poll()) != null)) {
+ final int bytesFreed = next.remaining();
+ s.onNext(next);
+ requests.decrementAndGet();
+ windowScalingIncrement.addAndGet(bytesFreed);
+ }
+ if (completed && buffers.isEmpty()) {
+ subscriber = null;
+ s.onComplete();
+ }
+ } finally {
+ flushInProgress.set(false);
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ @Override
+ public void subscribe(final Subscriber<? super ByteBuffer> subscriber) {
+ this.subscriber = Args.notNull(subscriber, "subscriber");
+ subscriber.onSubscribe(new Subscription() {
+ @Override
+ public void request(final long increment) {
+ if (increment <= 0) {
+ failed(new IllegalArgumentException("The number of elements requested must be strictly positive"));
+ return;
+ }
+ requests.addAndGet(increment);
+ flushToSubscriber();
+ }
+
+ @Override
+ public void cancel() {
+ ReactiveDataConsumer.this.subscriber = null;
+ }
+ });
+ }
+
+}
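The requests counter implements the Reactive Streams backpressure contract: buffers are handed to onNext only while outstanding demand is positive, and flushToSubscriber() is re-entered whenever data, demand, or completion arrives. A sketch of the consuming side (a hand-written Subscriber shown for clarity; wrapping the publisher with Flux.from(...) behaves the same way):

    import java.nio.ByteBuffer;

    import org.reactivestreams.Subscriber;
    import org.reactivestreams.Subscription;

    final class OneAtATimeSubscriber implements Subscriber<ByteBuffer> {
        private Subscription subscription;

        @Override
        public void onSubscribe(final Subscription s) {
            subscription = s;
            s.request(1); // open the window with a single buffer
        }

        @Override
        public void onNext(final ByteBuffer buffer) {
            System.out.println("received " + buffer.remaining() + " bytes");
            subscription.request(1); // ask for the next buffer only once this one is consumed
        }

        @Override
        public void onError(final Throwable t) {
            t.printStackTrace();
        }

        @Override
        public void onComplete() {
            System.out.println("stream complete");
        }
    }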
diff --git a/client/rest/src/main/java/org/opensearch/client/ReactiveDataProducer.java b/client/rest/src/main/java/org/opensearch/client/ReactiveDataProducer.java
new file mode 100644
index 0000000000000..0f80af81ab60b
--- /dev/null
+++ b/client/rest/src/main/java/org/opensearch/client/ReactiveDataProducer.java
@@ -0,0 +1,147 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.client;
+
+import org.apache.http.nio.ContentEncoder;
+import org.apache.http.nio.IOControl;
+import org.apache.http.nio.entity.HttpAsyncContentProducer;
+import org.apache.http.util.Args;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.reactivestreams.Publisher;
+import org.reactivestreams.Subscriber;
+import org.reactivestreams.Subscription;
+
+/**
+ * Adapted from https://github.com/apache/httpcomponents-core/blob/master/httpcore5-reactive/src/main/java/org/apache/hc/core5/reactive/ReactiveDataProducer.java
+ */
+class ReactiveDataProducer implements HttpAsyncContentProducer, Subscriber<ByteBuffer> {
+ private static final int BUFFER_WINDOW_SIZE = 5;
+ private final AtomicReference<IOControl> controlChannel = new AtomicReference<>();
+ private final AtomicReference<Throwable> exception = new AtomicReference<>();
+ private final AtomicBoolean complete = new AtomicBoolean(false);
+ private final Publisher<ByteBuffer> publisher;
+ private final AtomicReference<Subscription> subscription = new AtomicReference<>();
+ private final ArrayDeque<ByteBuffer> buffers = new ArrayDeque<>(); // This field requires synchronization
+ private final ReentrantLock lock;
+
+ public ReactiveDataProducer(final Publisher<ByteBuffer> publisher) {
+ this.publisher = Args.notNull(publisher, "publisher");
+ this.lock = new ReentrantLock();
+ }
+
+ @Override
+ public void onSubscribe(final Subscription subscription) {
+ if (this.subscription.getAndSet(subscription) != null) {
+ throw new IllegalStateException("Already subscribed");
+ }
+
+ subscription.request(BUFFER_WINDOW_SIZE);
+ }
+
+ @Override
+ public void onNext(final ByteBuffer byteBuffer) {
+ final byte[] copy = new byte[byteBuffer.remaining()];
+ byteBuffer.get(copy);
+
+ lock.lock();
+ try {
+ buffers.add(ByteBuffer.wrap(copy));
+ } finally {
+ lock.unlock();
+ }
+
+ if (controlChannel.get() != null) {
+ controlChannel.get().requestOutput();
+ }
+ }
+
+ @Override
+ public void onError(final Throwable throwable) {
+ subscription.set(null);
+ exception.set(throwable);
+ if (controlChannel.get() != null) {
+ controlChannel.get().requestOutput();
+ }
+ }
+
+ @Override
+ public void onComplete() {
+ subscription.set(null);
+ complete.set(true);
+ if (controlChannel.get() != null) {
+ controlChannel.get().requestOutput();
+ }
+ }
+
+ @Override
+ public void produceContent(ContentEncoder encoder, IOControl ioControl) throws IOException {
+ if (controlChannel.get() == null) {
+ controlChannel.set(ioControl);
+ publisher.subscribe(this);
+ }
+
+ final Throwable t = exception.get();
+ final Subscription s = subscription.get();
+ int buffersToReplenish = 0;
+ try {
+ lock.lock();
+ try {
+ if (t != null) {
+ throw new IOException(t.getMessage(), t);
+ } else if (this.complete.get() && buffers.isEmpty()) {
+ encoder.complete();
+ } else {
+ while (!buffers.isEmpty()) {
+ final ByteBuffer nextBuffer = buffers.remove();
+ encoder.write(nextBuffer);
+ if (nextBuffer.remaining() > 0) {
+ buffers.push(nextBuffer);
+ break;
+ } else if (s != null) {
+ // We defer the #request call until after we release the buffer lock.
+ buffersToReplenish++;
+ }
+ }
+ }
+ } finally {
+ lock.unlock();
+ }
+ } finally {
+ if (s != null && buffersToReplenish > 0) {
+ s.request(buffersToReplenish);
+ }
+
+ if (!this.complete.get()) {
+ ioControl.suspendOutput();
+ }
+ }
+ }
+
+ @Override
+ public boolean isRepeatable() {
+ return false;
+ }
+
+ @Override
+ public void close() throws IOException {
+ controlChannel.set(null);
+
+ final Subscription s = subscription.getAndSet(null);
+ if (s != null) {
+ s.cancel();
+ }
+ }
+}
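BUFFER_WINDOW_SIZE caps read-ahead: five buffers are requested up front and demand is replenished only after buffers are fully written to the encoder, so a slow connection applies backpressure to the body publisher. That publisher can be built with ordinary Reactor operators; a sketch assuming an ndjson payload:

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;
    import java.util.List;

    import reactor.core.publisher.Flux;

    final List<String> lines = Arrays.asList("{\"index\":{}}", "{\"field\":1}");
    final Flux<ByteBuffer> body = Flux.fromIterable(lines)
        .map(line -> ByteBuffer.wrap((line + "\n").getBytes(StandardCharsets.UTF_8)));
    // 'body' can now back a streaming request as its Publisher<ByteBuffer>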
diff --git a/client/rest/src/main/java/org/opensearch/client/ReactiveRequestProducer.java b/client/rest/src/main/java/org/opensearch/client/ReactiveRequestProducer.java
new file mode 100644
index 0000000000000..3f306a96f24c4
--- /dev/null
+++ b/client/rest/src/main/java/org/opensearch/client/ReactiveRequestProducer.java
@@ -0,0 +1,166 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.client;
+
+import org.apache.http.Header;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpEntityEnclosingRequest;
+import org.apache.http.HttpHost;
+import org.apache.http.HttpRequest;
+import org.apache.http.entity.ContentType;
+import org.apache.http.message.BasicHeader;
+import org.apache.http.nio.ContentEncoder;
+import org.apache.http.nio.IOControl;
+import org.apache.http.nio.protocol.HttpAsyncRequestProducer;
+import org.apache.http.protocol.HttpContext;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+
+import org.reactivestreams.Publisher;
+
+class ReactiveRequestProducer implements HttpAsyncRequestProducer {
+ private final HttpHost target;
+ private final HttpRequest request;
+ private final ReactiveDataProducer producer;
+
+ private static final class ReactiveHttpEntity implements HttpEntity {
+ private final Header contentType;
+ private final Header contentEncoding;
+
+ private ReactiveHttpEntity(final Header contentType, final Header contentEncoding) {
+ this.contentType = contentType;
+ this.contentEncoding = contentEncoding;
+ }
+
+ @Override
+ public void writeTo(OutputStream outStream) throws IOException {
+ throw new UnsupportedOperationException("This operation is not supported");
+ }
+
+ @Override
+ public boolean isStreaming() {
+ return true;
+ }
+
+ @Override
+ public boolean isRepeatable() {
+ return false;
+ }
+
+ @Override
+ public boolean isChunked() {
+ return true;
+ }
+
+ @Override
+ public Header getContentType() {
+ return contentType;
+ }
+
+ @Override
+ public long getContentLength() {
+ return -1;
+ }
+
+ @Override
+ public Header getContentEncoding() {
+ return contentEncoding;
+ }
+
+ @Override
+ public InputStream getContent() throws IOException, UnsupportedOperationException {
+ throw new UnsupportedOperationException("This operation is not supported");
+ }
+
+ @Override
+ public void consumeContent() throws IOException {
+ throw new UnsupportedOperationException("This operation is not supported");
+ }
+ };
+
+ ReactiveRequestProducer(HttpRequest request, HttpHost target, Publisher<ByteBuffer> publisher) {
+ this.target = target;
+ this.request = request;
+ this.producer = new ReactiveDataProducer(publisher);
+ }
+
+ @Override
+ public HttpRequest generateRequest() {
+ final Header contentTypeHeader = request.getFirstHeader("Content-Type");
+ if (contentTypeHeader == null) {
+ request.setHeader(new BasicHeader("Content-Type", ContentType.APPLICATION_JSON.getMimeType()));
+ }
+
+ if (request instanceof HttpEntityEnclosingRequest) {
+ final HttpEntityEnclosingRequest enclosingRequest = (HttpEntityEnclosingRequest) request;
+ enclosingRequest.setEntity(
+ new ReactiveHttpEntity(request.getFirstHeader("Content-Type"), request.getFirstHeader("Content-Encoding"))
+ );
+ }
+
+ return this.request;
+ }
+
+ @Override
+ public HttpHost getTarget() {
+ return this.target;
+ }
+
+ @Override
+ public void produceContent(final ContentEncoder encoder, final IOControl ioControl) throws IOException {
+ if (this.producer != null) {
+ this.producer.produceContent(encoder, ioControl);
+ if (encoder.isCompleted()) {
+ this.producer.close();
+ }
+ }
+ }
+
+ @Override
+ public void requestCompleted(final HttpContext context) {
+ this.producer.onComplete();
+ }
+
+ @Override
+ public void failed(final Exception ex) {
+ this.producer.onError(ex);
+ }
+
+ @Override
+ public boolean isRepeatable() {
+ return this.producer.isRepeatable();
+ }
+
+ @Override
+ public void resetRequest() throws IOException {
+ this.producer.close();
+ }
+
+ @Override
+ public void close() throws IOException {
+ this.producer.close();
+ }
+
+ @Override
+ public String toString() {
+ final StringBuilder sb = new StringBuilder();
+ sb.append(this.target);
+ sb.append(' ');
+ sb.append(this.request);
+ if (this.producer != null) {
+ sb.append(' ');
+ sb.append(this.producer);
+ }
+ return sb.toString();
+ }
+
+}
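Worth noting: generateRequest() falls back to application/json when no Content-Type is set, and the synthesized entity is chunked with unknown length (getContentLength() returns -1). Callers streaming a different media type should set the header explicitly; a sketch (endpoint shown for illustration only):

    import org.apache.http.client.methods.HttpPost;

    final HttpPost post = new HttpPost("/_bulk/stream");
    post.setHeader("Content-Type", "application/x-ndjson"); // otherwise the producer defaults to application/json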
diff --git a/client/rest/src/main/java/org/opensearch/client/ReactiveResponseConsumer.java b/client/rest/src/main/java/org/opensearch/client/ReactiveResponseConsumer.java
new file mode 100644
index 0000000000000..9ccd64d4aa942
--- /dev/null
+++ b/client/rest/src/main/java/org/opensearch/client/ReactiveResponseConsumer.java
@@ -0,0 +1,57 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.client;
+
+import org.apache.http.HttpException;
+import org.apache.http.HttpResponse;
+import org.apache.http.concurrent.BasicFuture;
+import org.apache.http.concurrent.FutureCallback;
+import org.apache.http.nio.IOControl;
+import org.apache.http.nio.client.methods.AsyncByteConsumer;
+import org.apache.http.protocol.HttpContext;
+import org.apache.http.util.Args;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.reactivestreams.Publisher;
+
+class ReactiveResponseConsumer extends AsyncByteConsumer<HttpResponse> {
+ private final BasicFuture<Message<HttpResponse, Publisher<ByteBuffer>>> responseFuture;
+ private final ReactiveDataConsumer reactiveDataConsumer = new ReactiveDataConsumer();
+
+ ReactiveResponseConsumer(final FutureCallback<Message<HttpResponse, Publisher<ByteBuffer>>> responseCallback) {
+ this.responseFuture = new BasicFuture<>(Args.notNull(responseCallback, "responseCallback"));
+ }
+
+ @Override
+ protected void onByteReceived(ByteBuffer buf, IOControl ioctrl) throws IOException {
+ reactiveDataConsumer.consume(buf);
+ ioctrl.requestInput();
+ }
+
+ @Override
+ public void onResponseReceived(HttpResponse response) throws HttpException, IOException {
+ responseFuture.completed(new Message<>(response, reactiveDataConsumer));
+ }
+
+ @Override
+ public HttpResponse buildResult(HttpContext context) throws Exception {
+ reactiveDataConsumer.complete();
+ return null;
+ }
+
+ @Override
+ protected void releaseResources() {
+ if (getException() != null) {
+ reactiveDataConsumer.failed(getException());
+ responseFuture.failed(getException());
+ }
+ }
+}
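The consumer completes its future as soon as the response head arrives (onResponseReceived), while the body keeps flowing through the embedded ReactiveDataConsumer; buildResult returns null because the real result was already delivered. A sketch of the wiring in the spirit of RestClient#streamRequest (package-private types, so again assume code inside org.opensearch.client):

    import java.nio.ByteBuffer;
    import java.util.concurrent.CancellationException;

    import org.apache.http.HttpResponse;
    import org.apache.http.concurrent.FutureCallback;
    import org.reactivestreams.Publisher;
    import reactor.core.publisher.Mono;

    final Mono<Message<HttpResponse, Publisher<ByteBuffer>>> head = Mono.create(emitter -> {
        final ReactiveResponseConsumer consumer = new ReactiveResponseConsumer(
            new FutureCallback<Message<HttpResponse, Publisher<ByteBuffer>>>() {
                @Override
                public void completed(final Message<HttpResponse, Publisher<ByteBuffer>> message) {
                    emitter.success(message); // head is available; the body streams afterwards
                }

                @Override
                public void failed(final Exception ex) {
                    emitter.error(ex);
                }

                @Override
                public void cancelled() {
                    emitter.error(new CancellationException("request was cancelled"));
                }
            }
        );
        // hand 'consumer' to CloseableHttpAsyncClient#execute(producer, consumer, context, callback)
    });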
diff --git a/client/rest/src/main/java/org/opensearch/client/Request.java b/client/rest/src/main/java/org/opensearch/client/Request.java
index df81ca7f717ae..9bf8cce3e70e7 100644
--- a/client/rest/src/main/java/org/opensearch/client/Request.java
+++ b/client/rest/src/main/java/org/opensearch/client/Request.java
@@ -110,7 +110,13 @@ public void addParameters(Map<String, String> paramSource) {
* will change it.
*/
public Map<String, String> getParameters() {
- return unmodifiableMap(parameters);
+ if (options.getParameters().isEmpty()) {
+ return unmodifiableMap(parameters);
+ } else {
+ Map<String, String> combinedParameters = new HashMap<>(parameters);
+ combinedParameters.putAll(options.getParameters());
+ return unmodifiableMap(combinedParameters);
+ }
}
/**
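The net effect of the merge, sketched below (endpoint and values illustrative): a parameter supplied through RequestOptions wins over the same parameter set on the Request itself.

    Request request = new Request("GET", "/_search");
    request.addParameter("timeout", "30s");

    request.setOptions(
        RequestOptions.DEFAULT.toBuilder()
            .addParameter("timeout", "5s") // overrides the Request-level value
            .build()
    );

    assert "5s".equals(request.getParameters().get("timeout"));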
diff --git a/client/rest/src/main/java/org/opensearch/client/RequestOptions.java b/client/rest/src/main/java/org/opensearch/client/RequestOptions.java
index 5390e303ff499..13a14b9e4da12 100644
--- a/client/rest/src/main/java/org/opensearch/client/RequestOptions.java
+++ b/client/rest/src/main/java/org/opensearch/client/RequestOptions.java
@@ -40,8 +40,11 @@
import java.util.ArrayList;
import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.Objects;
+import java.util.stream.Collectors;
/**
* The portion of an HTTP request to OpenSearch that can be
@@ -53,18 +56,21 @@ public final class RequestOptions {
*/
public static final RequestOptions DEFAULT = new Builder(
Collections.emptyList(),
+ Collections.emptyMap(),
HeapBufferedResponseConsumerFactory.DEFAULT,
null,
null
).build();
private final List<Header> headers;
+ private final Map<String, String> parameters;
private final HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory;
private final WarningsHandler warningsHandler;
private final RequestConfig requestConfig;
private RequestOptions(Builder builder) {
this.headers = Collections.unmodifiableList(new ArrayList<>(builder.headers));
+ this.parameters = Collections.unmodifiableMap(new HashMap<>(builder.parameters));
this.httpAsyncResponseConsumerFactory = builder.httpAsyncResponseConsumerFactory;
this.warningsHandler = builder.warningsHandler;
this.requestConfig = builder.requestConfig;
@@ -74,7 +80,7 @@ private RequestOptions(Builder builder) {
* Create a builder that contains these options but can be modified.
*/
public Builder toBuilder() {
- return new Builder(headers, httpAsyncResponseConsumerFactory, warningsHandler, requestConfig);
+ return new Builder(headers, parameters, httpAsyncResponseConsumerFactory, warningsHandler, requestConfig);
}
/**
@@ -84,6 +90,14 @@ public List<Header> getHeaders() {
return headers;
}
+ /**
+ * Query parameters to attach to the request. Any parameters present here
+ * will override matching parameters in the {@link Request}, if they exist.
+ */
+ public Map<String, String> getParameters() {
+ return parameters;
+ }
+
/**
* The {@link HttpAsyncResponseConsumerFactory} used to create one
* {@link HttpAsyncResponseConsumer} callback per retry. Controls how the
@@ -139,6 +153,12 @@ public String toString() {
b.append(headers.get(h).toString());
}
}
+ if (parameters.size() > 0) {
+ if (comma) b.append(", ");
+ comma = true;
+ b.append("parameters=");
+ b.append(parameters.entrySet().stream().map(e -> e.getKey() + "=" + e.getValue()).collect(Collectors.joining(",")));
+ }
if (httpAsyncResponseConsumerFactory != HttpAsyncResponseConsumerFactory.DEFAULT) {
if (comma) b.append(", ");
comma = true;
@@ -163,13 +183,14 @@ public boolean equals(Object obj) {
RequestOptions other = (RequestOptions) obj;
return headers.equals(other.headers)
+ && parameters.equals(other.parameters)
&& httpAsyncResponseConsumerFactory.equals(other.httpAsyncResponseConsumerFactory)
&& Objects.equals(warningsHandler, other.warningsHandler);
}
@Override
public int hashCode() {
- return Objects.hash(headers, httpAsyncResponseConsumerFactory, warningsHandler);
+ return Objects.hash(headers, parameters, httpAsyncResponseConsumerFactory, warningsHandler);
}
/**
@@ -179,17 +200,20 @@ public int hashCode() {
*/
public static class Builder {
private final List<Header> headers;
+ private final Map<String, String> parameters;
private HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory;
private WarningsHandler warningsHandler;
private RequestConfig requestConfig;
private Builder(
List<Header> headers,
+ Map<String, String> parameters,
HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory,
WarningsHandler warningsHandler,
RequestConfig requestConfig
) {
this.headers = new ArrayList<>(headers);
+ this.parameters = new HashMap<>(parameters);
this.httpAsyncResponseConsumerFactory = httpAsyncResponseConsumerFactory;
this.warningsHandler = warningsHandler;
this.requestConfig = requestConfig;
@@ -216,6 +240,21 @@ public Builder addHeader(String name, String value) {
return this;
}
+ /**
+ * Add the provided query parameter to the request. Any parameters added here
+ * will override matching parameters in the {@link Request}, if they exist.
+ *
+ * @param name the query parameter name
+ * @param value the query parameter value
+ * @throws NullPointerException if {@code name} or {@code value} is null.
+ */
+ public Builder addParameter(String name, String value) {
+ Objects.requireNonNull(name, "query parameter name cannot be null");
+ Objects.requireNonNull(value, "query parameter value cannot be null");
+ this.parameters.put(name, value);
+ return this;
+ }
+
/**
* Set the {@link HttpAsyncResponseConsumerFactory} used to create one
* {@link HttpAsyncResponseConsumer} callback per retry. Controls how the
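A typical use of the new builder method, assuming an already-built low-level RestClient named client: attach a one-off query parameter to a single call without mutating shared defaults.

    Request request = new Request("GET", "/_cluster/health");
    request.setOptions(RequestOptions.DEFAULT.toBuilder().addParameter("pretty", "true").build());
    Response response = client.performRequest(request);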
diff --git a/client/rest/src/main/java/org/opensearch/client/Response.java b/client/rest/src/main/java/org/opensearch/client/Response.java
index d380607b7df9e..ad492b18f0e5c 100644
--- a/client/rest/src/main/java/org/opensearch/client/Response.java
+++ b/client/rest/src/main/java/org/opensearch/client/Response.java
@@ -39,11 +39,8 @@
import org.apache.http.RequestLine;
import org.apache.http.StatusLine;
-import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
/**
* Holds an opensearch response. It wraps the {@link HttpResponse} returned and associates it with
@@ -115,79 +112,11 @@ public HttpEntity getEntity() {
return response.getEntity();
}
- /**
- * Optimized regular expression to test if a string matches the RFC 1123 date
- * format (with quotes and leading space). Start/end of line characters and
- * atomic groups are used to prevent backtracking.
- */
- private static final Pattern WARNING_HEADER_DATE_PATTERN = Pattern.compile("^ " + // start of line, leading space
- // quoted RFC 1123 date format
- "\"" + // opening quote
- "(?>Mon|Tue|Wed|Thu|Fri|Sat|Sun), " + // day of week, atomic group to prevent backtracking
- "\\d{2} " + // 2-digit day
- "(?>Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) " + // month, atomic group to prevent backtracking
- "\\d{4} " + // 4-digit year
- "\\d{2}:\\d{2}:\\d{2} " + // (two-digit hour):(two-digit minute):(two-digit second)
- "GMT" + // GMT
- "\"$"); // closing quote (optional, since an older version can still send a warn-date), end of line
-
- /**
- * Length of RFC 1123 format (with quotes and leading space), used in
- * matchWarningHeaderPatternByPrefix(String).
- */
- private static final int WARNING_HEADER_DATE_LENGTH = 0 + 1 + 1 + 3 + 1 + 1 + 2 + 1 + 3 + 1 + 4 + 1 + 2 + 1 + 2 + 1 + 2 + 1 + 3 + 1;
-
- /**
- * Tests if a string matches the RFC 7234 specification for warning headers.
- * This assumes that the warn code is always 299 and the warn agent is always
- * OpenSearch.
- *
- * @param s the value of a warning header formatted according to RFC 7234
- * @return {@code true} if the input string matches the specification
- */
- private static boolean matchWarningHeaderPatternByPrefix(final String s) {
- return s.startsWith("299 OpenSearch-") || s.startsWith("299 Elasticsearch-");
- }
-
- /**
- * Refer to org.opensearch.common.logging.DeprecationLogger
- */
- private static String extractWarningValueFromWarningHeader(final String s) {
- String warningHeader = s;
-
- /*
- * The following block tests for the existence of a RFC 1123 date in the warning header. If the date exists, it is removed for
- * extractWarningValueFromWarningHeader(String) to work properly (as it does not handle dates).
- */
- if (s.length() > WARNING_HEADER_DATE_LENGTH) {
- final String possibleDateString = s.substring(s.length() - WARNING_HEADER_DATE_LENGTH);
- final Matcher matcher = WARNING_HEADER_DATE_PATTERN.matcher(possibleDateString);
-
- if (matcher.matches()) {
- warningHeader = warningHeader.substring(0, s.length() - WARNING_HEADER_DATE_LENGTH);
- }
- }
-
- final int firstQuote = warningHeader.indexOf('\"');
- final int lastQuote = warningHeader.length() - 1;
- final String warningValue = warningHeader.substring(firstQuote + 1, lastQuote);
- return warningValue;
- }
-
/**
* Returns a list of all warning headers returned in the response.
*/
public List<String> getWarnings() {
- List<String> warnings = new ArrayList<>();
- for (Header header : response.getHeaders("Warning")) {
- String warning = header.getValue();
- if (matchWarningHeaderPatternByPrefix(warning)) {
- warnings.add(extractWarningValueFromWarningHeader(warning));
- } else {
- warnings.add(warning);
- }
- }
- return warnings;
+ return ResponseWarningsExtractor.getWarnings(response);
}
/**
diff --git a/client/rest/src/main/java/org/opensearch/client/ResponseWarningsExtractor.java b/client/rest/src/main/java/org/opensearch/client/ResponseWarningsExtractor.java
new file mode 100644
index 0000000000000..46b63097d76b4
--- /dev/null
+++ b/client/rest/src/main/java/org/opensearch/client/ResponseWarningsExtractor.java
@@ -0,0 +1,99 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.client;
+
+import org.apache.http.Header;
+import org.apache.http.HttpResponse;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+final class ResponseWarningsExtractor {
+
+ /**
+ * Optimized regular expression to test if a string matches the RFC 1123 date
+ * format (with quotes and leading space). Start/end of line characters and
+ * atomic groups are used to prevent backtracking.
+ */
+ private static final Pattern WARNING_HEADER_DATE_PATTERN = Pattern.compile("^ " + // start of line, leading space
+ // quoted RFC 1123 date format
+ "\"" + // opening quote
+ "(?>Mon|Tue|Wed|Thu|Fri|Sat|Sun), " + // day of week, atomic group to prevent backtracking
+ "\\d{2} " + // 2-digit day
+ "(?>Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) " + // month, atomic group to prevent backtracking
+ "\\d{4} " + // 4-digit year
+ "\\d{2}:\\d{2}:\\d{2} " + // (two-digit hour):(two-digit minute):(two-digit second)
+ "GMT" + // GMT
+ "\"$"); // closing quote (optional, since an older version can still send a warn-date), end of line
+
+ /**
+ * Length of RFC 1123 format (with quotes and leading space), used in
+ * matchWarningHeaderPatternByPrefix(String).
+ */
+ private static final int WARNING_HEADER_DATE_LENGTH = 0 + 1 + 1 + 3 + 1 + 1 + 2 + 1 + 3 + 1 + 4 + 1 + 2 + 1 + 2 + 1 + 2 + 1 + 3 + 1;
+
+ private ResponseWarningsExtractor() {}
+
+ /**
+ * Returns a list of all warning headers returned in the response.
+ * @param response HTTP response
+ */
+ static List<String> getWarnings(final HttpResponse response) {
+ List<String> warnings = new ArrayList<>();
+ for (Header header : response.getHeaders("Warning")) {
+ String warning = header.getValue();
+ if (matchWarningHeaderPatternByPrefix(warning)) {
+ warnings.add(extractWarningValueFromWarningHeader(warning));
+ } else {
+ warnings.add(warning);
+ }
+ }
+ return warnings;
+ }
+
+ /**
+ * Tests if a string matches the RFC 7234 specification for warning headers.
+ * This assumes that the warn code is always 299 and the warn agent is always
+ * OpenSearch.
+ *
+ * @param s the value of a warning header formatted according to RFC 7234
+ * @return {@code true} if the input string matches the specification
+ */
+ private static boolean matchWarningHeaderPatternByPrefix(final String s) {
+ return s.startsWith("299 OpenSearch-");
+ }
+
+ /**
+ * Refer to org.opensearch.common.logging.DeprecationLogger
+ */
+ private static String extractWarningValueFromWarningHeader(final String s) {
+ String warningHeader = s;
+
+ /*
+ * The following block tests for the existence of a RFC 1123 date in the warning header. If the date exists, it is removed for
+ * extractWarningValueFromWarningHeader(String) to work properly (as it does not handle dates).
+ */
+ if (s.length() > WARNING_HEADER_DATE_LENGTH) {
+ final String possibleDateString = s.substring(s.length() - WARNING_HEADER_DATE_LENGTH);
+ final Matcher matcher = WARNING_HEADER_DATE_PATTERN.matcher(possibleDateString);
+
+ if (matcher.matches()) {
+ warningHeader = warningHeader.substring(0, s.length() - WARNING_HEADER_DATE_LENGTH);
+ }
+ }
+
+ final int firstQuote = warningHeader.indexOf('\"');
+ final int lastQuote = warningHeader.length() - 1;
+ final String warningValue = warningHeader.substring(firstQuote + 1, lastQuote);
+ return warningValue;
+ }
+
+}
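A worked example of the extraction (version string and message are illustrative): the prefix check admits the header, the trailing quoted RFC 1123 warn-date is stripped, and the quoted warn-text is returned.

    import org.apache.http.HttpVersion;
    import org.apache.http.message.BasicHttpResponse;

    final BasicHttpResponse response = new BasicHttpResponse(HttpVersion.HTTP_1_1, 200, "OK");
    response.addHeader(
        "Warning",
        "299 OpenSearch-2.17.0 \"this request accesses system indices\" \"Mon, 01 Jan 2024 00:00:00 GMT\""
    );
    // Within the package: ResponseWarningsExtractor.getWarnings(response)
    // yields ["this request accesses system indices"]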
diff --git a/client/rest/src/main/java/org/opensearch/client/RestClient.java b/client/rest/src/main/java/org/opensearch/client/RestClient.java
index 3e07365f4f80e..ac86460adba97 100644
--- a/client/rest/src/main/java/org/opensearch/client/RestClient.java
+++ b/client/rest/src/main/java/org/opensearch/client/RestClient.java
@@ -60,6 +60,7 @@
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.client.BasicAuthCache;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
+import org.apache.http.message.BasicHttpResponse;
import org.apache.http.nio.client.methods.HttpAsyncMethods;
import org.apache.http.nio.protocol.HttpAsyncRequestProducer;
import org.apache.http.nio.protocol.HttpAsyncResponseConsumer;
@@ -75,6 +76,7 @@
import java.net.SocketTimeoutException;
import java.net.URI;
import java.net.URISyntaxException;
+import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
@@ -90,6 +92,7 @@
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
+import java.util.concurrent.CancellationException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
@@ -97,6 +100,11 @@
import java.util.stream.Collectors;
import java.util.zip.GZIPOutputStream;
+import org.reactivestreams.Publisher;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+import reactor.core.publisher.MonoSink;
+
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Collections.singletonList;
@@ -291,6 +299,23 @@ public boolean isRunning() {
return client.isRunning();
}
+ /**
+ * Sends a streaming request to the OpenSearch cluster that the client points to and returns the streaming response. This is an experimental API.
+ * @param request streaming request
+ * @return streaming response
+ * @throws IOException IOException
+ */
+ public StreamingResponse<ByteBuffer> streamRequest(StreamingRequest<ByteBuffer> request) throws IOException {
+ final InternalStreamingRequest internalRequest = new InternalStreamingRequest(request);
+
+ final StreamingResponse<ByteBuffer> response = new StreamingResponse<>(
+ internalRequest.httpRequest.getRequestLine(),
+ streamRequest(nextNodes(), internalRequest)
+ );
+
+ return response;
+ }
+
/**
* Sends a request to the OpenSearch cluster that the client points to.
* Blocks until the request is completed and returns its response or fails
@@ -323,13 +348,13 @@ public Response performRequest(Request request) throws IOException {
private Response performRequest(final NodeTuple<Iterator<Node>> nodeTuple, final InternalRequest request, Exception previousException)
throws IOException {
- RequestContext context = request.createContextForNextAttempt(nodeTuple.nodes.next(), nodeTuple.authCache);
+ RequestContext<HttpResponse> context = request.createContextForNextAttempt(nodeTuple.nodes.next(), nodeTuple.authCache);
HttpResponse httpResponse;
try {
- httpResponse = client.execute(context.requestProducer, context.asyncResponseConsumer, context.context, null).get();
+ httpResponse = client.execute(context.requestProducer(), context.asyncResponseConsumer(), context.context(), null).get();
} catch (Exception e) {
- RequestLogger.logFailedRequest(logger, request.httpRequest, context.node, e);
- onFailure(context.node);
+ RequestLogger.logFailedRequest(logger, request.httpRequest, context.node(), e);
+ onFailure(context.node());
Exception cause = extractAndWrapCause(e);
addSuppressedException(previousException, cause);
if (nodeTuple.nodes.hasNext()) {
@@ -343,7 +368,7 @@ private Response performRequest(final NodeTuple<Iterator<Node>> nodeTuple, final
}
throw new IllegalStateException("unexpected exception type: must be either RuntimeException or IOException", cause);
}
- ResponseOrResponseException responseOrResponseException = convertResponse(request, context.node, httpResponse);
+ ResponseOrResponseException responseOrResponseException = convertResponse(request, context.node(), httpResponse);
if (responseOrResponseException.responseException == null) {
return responseOrResponseException.response;
}
@@ -354,6 +379,84 @@ private Response performRequest(final NodeTuple> nodeTuple, final
throw responseOrResponseException.responseException;
}
+ private Publisher<Message<HttpResponse, Publisher<ByteBuffer>>> streamRequest(
+ final NodeTuple<Iterator<Node>> nodeTuple,
+ final InternalStreamingRequest request
+ ) throws IOException {
+ return request.cancellable.callIfNotCancelled(() -> {
+ final Node node = nodeTuple.nodes.next();
+
+ final Mono<Message<HttpResponse, Publisher<ByteBuffer>>> publisher = Mono.create(emitter -> {
+ final RequestContext<HttpResponse> context = request.createContextForNextAttempt(node, nodeTuple.authCache, emitter);
+ client.execute(context.requestProducer(), context.asyncResponseConsumer(), context.context(), null);
+ });
+
+ return publisher.flatMap(message -> {
+ try {
+ final ResponseOrResponseException responseOrResponseException = convertResponse(request, node, message);
+ if (responseOrResponseException.responseException == null) {
+ return Mono.just(
+ new Message<>(
+ message.getHead(),
+ Flux.from(message.getBody()).flatMapSequential(b -> Flux.fromIterable(frame(b)))
+ )
+ );
+ } else {
+ if (nodeTuple.nodes.hasNext()) {
+ return Mono.from(streamRequest(nodeTuple, request));
+ } else {
+ return Mono.error(responseOrResponseException.responseException);
+ }
+ }
+ } catch (final Exception ex) {
+ return Mono.error(ex);
+ }
+ });
+ });
+ }
+
+ /**
+ * Frame the {@link ByteBuffer} into individual chunks that are separated by '\r\n' sequence.
+ * @param b {@link ByteBuffer} to split
+ * @return individual chunks
+ */
+ private static Collection<ByteBuffer> frame(ByteBuffer b) {
+ final Collection<ByteBuffer> buffers = new ArrayList<>();
+
+ int position = b.position();
+ while (b.hasRemaining()) {
+ // Skip the chunk separator when it comes right at the beginning
+ if (b.get() == '\r' && b.hasRemaining() && b.position() > 1) {
+ if (b.get() == '\n') {
+ final byte[] chunk = new byte[b.position() - position];
+
+ b.position(position);
+ b.get(chunk);
+
+ // Do not copy the '\r\n' sequence
+ buffers.add(ByteBuffer.wrap(chunk, 0, chunk.length - 2));
+ position = b.position();
+ }
+ }
+ }
+
+ if (buffers.isEmpty()) {
+ return Collections.singleton(b);
+ }
+
+ // Copy last chunk
+ if (position != b.position()) {
+ final byte[] chunk = new byte[b.position() - position];
+
+ b.position(position);
+ b.get(chunk);
+
+ buffers.add(ByteBuffer.wrap(chunk, 0, chunk.length));
+ }
+
+ return buffers;
+ }
+
private ResponseOrResponseException convertResponse(InternalRequest request, Node node, HttpResponse httpResponse) throws IOException {
RequestLogger.logResponse(logger, request.httpRequest, node.getHost(), httpResponse);
int statusCode = httpResponse.getStatusLine().getStatusCode();
@@ -384,6 +487,37 @@ private ResponseOrResponseException convertResponse(InternalRequest request, Nod
throw responseException;
}
+ private ResponseOrResponseException convertResponse(
+ InternalStreamingRequest request,
+ Node node,
+ Message<HttpResponse, Publisher<ByteBuffer>> message
+ ) throws IOException {
+
+ // Streaming Response could accumulate a lot of data so we may not be able to fully consume it.
+ final HttpResponse httpResponse = new BasicHttpResponse(message.getHead().getStatusLine());
+ final Response response = new Response(request.httpRequest.getRequestLine(), node.getHost(), httpResponse);
+
+ RequestLogger.logResponse(logger, request.httpRequest, node.getHost(), httpResponse);
+ int statusCode = httpResponse.getStatusLine().getStatusCode();
+
+ if (isSuccessfulResponse(statusCode) || request.ignoreErrorCodes.contains(response.getStatusLine().getStatusCode())) {
+ onResponse(node);
+ if (request.warningsHandler.warningsShouldFailRequest(response.getWarnings())) {
+ throw new WarningFailureException(response);
+ }
+ return new ResponseOrResponseException(response);
+ }
+ ResponseException responseException = new ResponseException(response);
+ if (isRetryStatus(statusCode)) {
+ // mark host dead and retry against next one
+ onFailure(node);
+ return new ResponseOrResponseException(responseException);
+ }
+ // mark host alive and don't retry, as the error should be a request problem
+ onResponse(node);
+ throw responseException;
+ }
+
/**
* Sends a request to the OpenSearch cluster that the client points to.
* The request is executed asynchronously and the provided
@@ -418,48 +552,57 @@ private void performRequestAsync(
final FailureTrackingResponseListener listener
) {
request.cancellable.runIfNotCancelled(() -> {
- final RequestContext context = request.createContextForNextAttempt(nodeTuple.nodes.next(), nodeTuple.authCache);
- client.execute(context.requestProducer, context.asyncResponseConsumer, context.context, new FutureCallback<HttpResponse>() {
- @Override
- public void completed(HttpResponse httpResponse) {
- try {
- ResponseOrResponseException responseOrResponseException = convertResponse(request, context.node, httpResponse);
- if (responseOrResponseException.responseException == null) {
- listener.onSuccess(responseOrResponseException.response);
- } else {
- if (nodeTuple.nodes.hasNext()) {
- listener.trackFailure(responseOrResponseException.responseException);
- performRequestAsync(nodeTuple, request, listener);
+ final RequestContext<HttpResponse> context = request.createContextForNextAttempt(nodeTuple.nodes.next(), nodeTuple.authCache);
+ client.execute(
+ context.requestProducer(),
+ context.asyncResponseConsumer(),
+ context.context(),
+ new FutureCallback<HttpResponse>() {
+ @Override
+ public void completed(HttpResponse httpResponse) {
+ try {
+ ResponseOrResponseException responseOrResponseException = convertResponse(
+ request,
+ context.node(),
+ httpResponse
+ );
+ if (responseOrResponseException.responseException == null) {
+ listener.onSuccess(responseOrResponseException.response);
} else {
- listener.onDefinitiveFailure(responseOrResponseException.responseException);
+ if (nodeTuple.nodes.hasNext()) {
+ listener.trackFailure(responseOrResponseException.responseException);
+ performRequestAsync(nodeTuple, request, listener);
+ } else {
+ listener.onDefinitiveFailure(responseOrResponseException.responseException);
+ }
}
+ } catch (Exception e) {
+ listener.onDefinitiveFailure(e);
}
- } catch (Exception e) {
- listener.onDefinitiveFailure(e);
}
- }
- @Override
- public void failed(Exception failure) {
- try {
- RequestLogger.logFailedRequest(logger, request.httpRequest, context.node, failure);
- onFailure(context.node);
- if (nodeTuple.nodes.hasNext()) {
- listener.trackFailure(failure);
- performRequestAsync(nodeTuple, request, listener);
- } else {
- listener.onDefinitiveFailure(failure);
+ @Override
+ public void failed(Exception failure) {
+ try {
+ RequestLogger.logFailedRequest(logger, request.httpRequest, context.node(), failure);
+ onFailure(context.node());
+ if (nodeTuple.nodes.hasNext()) {
+ listener.trackFailure(failure);
+ performRequestAsync(nodeTuple, request, listener);
+ } else {
+ listener.onDefinitiveFailure(failure);
+ }
+ } catch (Exception e) {
+ listener.onDefinitiveFailure(e);
}
- } catch (Exception e) {
- listener.onDefinitiveFailure(e);
}
- }
- @Override
- public void cancelled() {
- listener.onDefinitiveFailure(Cancellable.newCancellationException());
+ @Override
+ public void cancelled() {
+ listener.onDefinitiveFailure(Cancellable.newCancellationException());
+ }
}
- });
+ );
});
}
@@ -800,6 +943,66 @@ public void remove() {
}
}
+ private class InternalStreamingRequest {
+ private final StreamingRequest<ByteBuffer> request;
+ private final Set<Integer> ignoreErrorCodes;
+ private final HttpRequestBase httpRequest;
+ private final Cancellable cancellable;
+ private final WarningsHandler warningsHandler;
+
+ InternalStreamingRequest(StreamingRequest<ByteBuffer> request) {
+ this.request = request;
+ Map<String, String> params = new HashMap<>(request.getParameters());
+ // ignore is a special parameter supported by the clients, shouldn't be sent to the server
+ String ignoreString = params.remove("ignore");
+ this.ignoreErrorCodes = getIgnoreErrorCodes(ignoreString, request.getMethod());
+ URI uri = buildUri(pathPrefix, request.getEndpoint(), params);
+ this.httpRequest = createHttpRequest(request.getMethod(), uri, null);
+ this.cancellable = Cancellable.fromRequest(httpRequest);
+ setHeaders(httpRequest, request.getOptions().getHeaders());
+ setRequestConfig(httpRequest, request.getOptions().getRequestConfig());
+ this.warningsHandler = request.getOptions().getWarningsHandler() == null
+ ? RestClient.this.warningsHandler
+ : request.getOptions().getWarningsHandler();
+ }
+
+ private void setHeaders(HttpRequest httpRequest, Collection<Header> requestHeaders) {
+ // request headers override default headers, so we don't add default headers if they exist as request headers
+ final Set<String> requestNames = new HashSet<>(requestHeaders.size());
+ for (Header requestHeader : requestHeaders) {
+ httpRequest.addHeader(requestHeader);
+ requestNames.add(requestHeader.getName());
+ }
+ for (Header defaultHeader : defaultHeaders) {
+ if (requestNames.contains(defaultHeader.getName()) == false) {
+ httpRequest.addHeader(defaultHeader);
+ }
+ }
+ if (compressionEnabled) {
+ httpRequest.addHeader("Accept-Encoding", "gzip");
+ }
+ }
+
+ private void setRequestConfig(HttpRequestBase httpRequest, RequestConfig requestConfig) {
+ if (requestConfig != null) {
+ httpRequest.setConfig(requestConfig);
+ }
+ }
+
+ public Publisher<ByteBuffer> getPublisher() {
+ return request.getBody();
+ }
+
+ RequestContext<Message<HttpResponse, Publisher<ByteBuffer>>> createContextForNextAttempt(
+ Node node,
+ AuthCache authCache,
+ MonoSink<Message<HttpResponse, Publisher<ByteBuffer>>> emitter
+ ) {
+ this.httpRequest.reset();
+ return new ReactiveRequestContext(this, node, authCache, emitter);
+ }
+ }
+
private class InternalRequest {
private final Request request;
+ private final Set<Integer> ignoreErrorCodes;
@@ -846,19 +1049,89 @@ private void setRequestConfig(HttpRequestBase httpRequest, RequestConfig request
}
}
- RequestContext createContextForNextAttempt(Node node, AuthCache authCache) {
+ RequestContext<HttpResponse> createContextForNextAttempt(Node node, AuthCache authCache) {
this.httpRequest.reset();
- return new RequestContext(this, node, authCache);
+ return new AsyncRequestContext(this, node, authCache);
+ }
+ }
+
+ private interface RequestContext<T> {
+ Node node();
+
+ HttpAsyncRequestProducer requestProducer();
+
+ HttpAsyncResponseConsumer<T> asyncResponseConsumer();
+
+ HttpClientContext context();
+ }
+
+ private static class ReactiveRequestContext implements RequestContext<Message<HttpResponse, Publisher<ByteBuffer>>> {
+ private final Node node;
+ private final HttpAsyncRequestProducer requestProducer;
+ private final HttpAsyncResponseConsumer<Message<HttpResponse, Publisher<ByteBuffer>>> asyncResponseConsumer;
+ private final HttpClientContext context;
+
+ ReactiveRequestContext(
+ InternalStreamingRequest request,
+ Node node,
+ AuthCache authCache,
+ MonoSink<Message<HttpResponse, Publisher<ByteBuffer>>> emitter
+ ) {
+ this.node = node;
+ // we stream the request body if the entity allows for it
+ this.requestProducer = new ReactiveRequestProducer(request.httpRequest, node.getHost(), request.getPublisher());
+ this.asyncResponseConsumer = new ReactiveResponseConsumer(new FutureCallback<Message<HttpResponse, Publisher<ByteBuffer>>>() {
+ @Override
+ public void failed(Exception ex) {
+ emitter.error(ex);
+ }
+
+ @Override
+ public void completed(Message<HttpResponse, Publisher<ByteBuffer>> result) {
+ if (result == null) {
+ emitter.success();
+ } else {
+ emitter.success(result);
+ }
+ }
+
+ @Override
+ public void cancelled() {
+ failed(new CancellationException("Future cancelled"));
+ }
+ });
+ this.context = HttpClientContext.create();
+ context.setAuthCache(authCache);
+ }
+
+ @Override
+ public HttpAsyncResponseConsumer<Message<HttpResponse, Publisher<ByteBuffer>>> asyncResponseConsumer() {
+ return asyncResponseConsumer;
+ }
+
+ @Override
+ public HttpClientContext context() {
+ return context;
+ }
+
+ @Override
+ public Node node() {
+ return node;
+ }
+
+ @Override
+ public HttpAsyncRequestProducer requestProducer() {
+ return requestProducer;
}
}
- private static class RequestContext {
+ private static class AsyncRequestContext implements RequestContext<HttpResponse> {
private final Node node;
private final HttpAsyncRequestProducer requestProducer;
private final HttpAsyncResponseConsumer<HttpResponse> asyncResponseConsumer;
private final HttpClientContext context;
- RequestContext(InternalRequest request, Node node, AuthCache authCache) {
+ AsyncRequestContext(InternalRequest request, Node node, AuthCache authCache) {
this.node = node;
// we stream the request body if the entity allows for it
this.requestProducer = HttpAsyncMethods.create(node.getHost(), request.httpRequest);
@@ -868,6 +1141,26 @@ private static class RequestContext {
this.context = HttpClientContext.create();
context.setAuthCache(authCache);
}
+
+ @Override
+ public HttpAsyncResponseConsumer<HttpResponse> asyncResponseConsumer() {
+ return asyncResponseConsumer;
+ }
+
+ @Override
+ public HttpClientContext context() {
+ return context;
+ }
+
+ @Override
+ public Node node() {
+ return node;
+ }
+
+ @Override
+ public HttpAsyncRequestProducer requestProducer() {
+ return requestProducer;
+ }
}
private static Set<Integer> getIgnoreErrorCodes(String ignoreString, String requestMethod) {
diff --git a/client/rest/src/main/java/org/opensearch/client/StreamingRequest.java b/client/rest/src/main/java/org/opensearch/client/StreamingRequest.java
new file mode 100644
index 0000000000000..e1767407b1353
--- /dev/null
+++ b/client/rest/src/main/java/org/opensearch/client/StreamingRequest.java
@@ -0,0 +1,114 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.client;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+import org.reactivestreams.Publisher;
+
+import static java.util.Collections.unmodifiableMap;
+
+/**
+ * HTTP Streaming Request to OpenSearch. This is an experimental API.
+ */
+public class StreamingRequest<T> {
+ private final String method;
+ private final String endpoint;
+ private final Map<String, String> parameters = new HashMap<>();
+
+ private RequestOptions options = RequestOptions.DEFAULT;
+ private final Publisher<T> publisher;
+
+ /**
+ * Constructor
+ * @param method method
+ * @param endpoint endpoint
+ * @param publisher publisher
+ */
+ public StreamingRequest(String method, String endpoint, Publisher<T> publisher) {
+ this.method = method;
+ this.endpoint = endpoint;
+ this.publisher = publisher;
+ }
+
+ /**
+ * Get endpoint
+ * @return endpoint
+ */
+ public String getEndpoint() {
+ return endpoint;
+ }
+
+ /**
+ * Get method
+ * @return method
+ */
+ public String getMethod() {
+ return method;
+ }
+
+ /**
+ * Get options
+ * @return options
+ */
+ public RequestOptions getOptions() {
+ return options;
+ }
+
+ /**
+ * Get parameters
+ * @return parameters
+ */
+ public Map<String, String> getParameters() {
+ if (options.getParameters().isEmpty()) {
+ return unmodifiableMap(parameters);
+ } else {
+ Map<String, String> combinedParameters = new HashMap<>(parameters);
+ combinedParameters.putAll(options.getParameters());
+ return unmodifiableMap(combinedParameters);
+ }
+ }
+
+ /**
+ * Add a query string parameter.
+ * @param name the name of the url parameter. Must not be null.
+ * @param value the value of the url parameter. If {@code null} then
+ * the parameter is sent as {@code name} rather than {@code name=value}
+ * @throws IllegalArgumentException if a parameter with that name has
+ * already been set
+ */
+ public void addParameter(String name, String value) {
+ Objects.requireNonNull(name, "url parameter name cannot be null");
+ if (parameters.containsKey(name)) {
+ throw new IllegalArgumentException("url parameter [" + name + "] has already been set to [" + parameters.get(name) + "]");
+ } else {
+ parameters.put(name, value);
+ }
+ }
+
+ /**
+ * Add query parameters using the provided map of key value pairs.
+ *
+ * @param paramSource a map of key value pairs where the key is the url parameter.
+ * @throws IllegalArgumentException if a parameter with that name has already been set.
+ */
+ public void addParameters(Map<String, String> paramSource) {
+ paramSource.forEach(this::addParameter);
+ }
+
+ /**
+ * Body publisher
+ * @return body publisher
+ */
+ public Publisher<T> getBody() {
+ return publisher;
+ }
+}
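For orientation, a minimal usage sketch of the new request type. This is a sketch only: the "/_bulk/stream" endpoint, the payload, and the helper name are hypothetical, and Mono.just serves merely as a convenient single-chunk Publisher:

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;
    import reactor.core.publisher.Mono;

    static StreamingRequest<ByteBuffer> newBulkStreamRequest() {
        // "/_bulk/stream" is a hypothetical endpoint; any streaming-capable endpoint is used the same way
        StreamingRequest<ByteBuffer> request = new StreamingRequest<>(
            "POST",
            "/_bulk/stream",
            Mono.just(ByteBuffer.wrap("{\"index\":{}}\n{\"f\":1}\n".getBytes(StandardCharsets.UTF_8)))
        );
        request.addParameter("refresh", "true");
        return request;
    }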
diff --git a/client/rest/src/main/java/org/opensearch/client/StreamingResponse.java b/client/rest/src/main/java/org/opensearch/client/StreamingResponse.java
new file mode 100644
index 0000000000000..4869187ab9261
--- /dev/null
+++ b/client/rest/src/main/java/org/opensearch/client/StreamingResponse.java
@@ -0,0 +1,94 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.client;
+
+import org.apache.http.HttpHost;
+import org.apache.http.HttpResponse;
+import org.apache.http.RequestLine;
+import org.apache.http.StatusLine;
+
+import java.util.List;
+
+import org.reactivestreams.Publisher;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+/**
+ * HTTP Streaming Response from OpenSearch. This is an experimental API.
+ */
+public class StreamingResponse<T> {
+ private final RequestLine requestLine;
+ private final Mono<Message<HttpResponse, Publisher<T>>> publisher;
+ private volatile HttpHost host;
+
+ /**
+ * Constructor
+ * @param requestLine request line
+ * @param publisher message publisher(response with a body)
+ */
+ public StreamingResponse(RequestLine requestLine, Publisher<Message<HttpResponse, Publisher<T>>> publisher) {
+ this.requestLine = requestLine;
+ // We cache the publisher here so that the body and/or the HttpResponse can
+ // be consumed independently and more than once.
+ this.publisher = Mono.from(publisher).cache();
+ }
+
+ /**
+ * Set host
+ * @param host host
+ */
+ public void setHost(HttpHost host) {
+ this.host = host;
+ }
+
+ /**
+ * Get request line
+ * @return request line
+ */
+ public RequestLine getRequestLine() {
+ return requestLine;
+ }
+
+ /**
+ * Get host
+ * @return host
+ */
+ public HttpHost getHost() {
+ return host;
+ }
+
+ /**
+ * Get response body {@link Publisher}
+ * @return response body {@link Publisher}
+ */
+ public Publisher<T> getBody() {
+ return publisher.flatMapMany(m -> Flux.from(m.getBody()));
+ }
+
+ /**
+ * Returns the status line of the current response
+ */
+ public StatusLine getStatusLine() {
+ return publisher.map(Message::getHead)
+ .onErrorResume(ResponseException.class, e -> Mono.just(e.getResponse().getHttpResponse()))
+ .map(HttpResponse::getStatusLine)
+ .block();
+ }
+
+ /**
+ * Returns a list of all warning headers returned in the response.
+ */
+ public List<String> getWarnings() {
+ return ResponseWarningsExtractor.getWarnings(
+ publisher.map(Message::getHead)
+ .onErrorResume(ResponseException.class, e -> Mono.just(e.getResponse().getHttpResponse()))
+ .block()
+ );
+ }
+}
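Continuing the sketch above, consuming the streamed reply; this assumes a RestClient named client and the streamRequest entry point exercised by testStreamWithUnsupportedMethod further below. Note that getStatusLine() blocks until the response head arrives, while getBody() replays the cached message:

    import java.nio.charset.StandardCharsets;
    import reactor.core.publisher.Flux;

    StreamingResponse<ByteBuffer> response = client.streamRequest(newBulkStreamRequest());
    if (response.getStatusLine().getStatusCode() == 200) {
        Flux.from(response.getBody())
            .map(buffer -> StandardCharsets.UTF_8.decode(buffer).toString())
            .subscribe(System.out::print);
    }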
diff --git a/client/rest/src/test/java/org/opensearch/client/RequestOptionsTests.java b/client/rest/src/test/java/org/opensearch/client/RequestOptionsTests.java
index aaa40db1442ee..f1782a6a29795 100644
--- a/client/rest/src/test/java/org/opensearch/client/RequestOptionsTests.java
+++ b/client/rest/src/test/java/org/opensearch/client/RequestOptionsTests.java
@@ -38,12 +38,15 @@
import java.util.ArrayList;
import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertThrows;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
@@ -89,6 +92,39 @@ public void testAddHeader() {
}
}
+ public void testAddParameter() {
+ assertThrows(
+ "query parameter name cannot be null",
+ NullPointerException.class,
+ () -> randomBuilder().addParameter(null, randomAsciiLettersOfLengthBetween(3, 10))
+ );
+
+ assertThrows(
+ "query parameter value cannot be null",
+ NullPointerException.class,
+ () -> randomBuilder().addParameter(randomAsciiLettersOfLengthBetween(3, 10), null)
+ );
+
+ RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder();
+ int numParameters = between(0, 5);
+ Map parameters = new HashMap<>();
+ for (int i = 0; i < numParameters; i++) {
+ String name = randomAsciiAlphanumOfLengthBetween(5, 10);
+ String value = randomAsciiAlphanumOfLength(3);
+ parameters.put(name, value);
+ builder.addParameter(name, value);
+ }
+ RequestOptions options = builder.build();
+ assertEquals(parameters, options.getParameters());
+
+ try {
+ options.getParameters().put(randomAsciiAlphanumOfLengthBetween(5, 10), randomAsciiAlphanumOfLength(3));
+ fail("expected failure");
+ } catch (UnsupportedOperationException e) {
+ assertNull(e.getMessage());
+ }
+ }
+
public void testSetHttpAsyncResponseConsumerFactory() {
try {
RequestOptions.DEFAULT.toBuilder().setHttpAsyncResponseConsumerFactory(null);
@@ -144,6 +180,13 @@ static RequestOptions.Builder randomBuilder() {
}
}
+ if (randomBoolean()) {
+ int queryParamCount = between(1, 5);
+ for (int i = 0; i < queryParamCount; i++) {
+ builder.addParameter(randomAsciiAlphanumOfLength(3), randomAsciiAlphanumOfLength(3));
+ }
+ }
+
if (randomBoolean()) {
builder.setHttpAsyncResponseConsumerFactory(new HeapBufferedResponseConsumerFactory(1));
}
diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientTests.java
index ca761dcb6b9b6..cb5e856362fb7 100644
--- a/client/rest/src/test/java/org/opensearch/client/RestClientTests.java
+++ b/client/rest/src/test/java/org/opensearch/client/RestClientTests.java
@@ -55,12 +55,15 @@
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;
+import reactor.core.publisher.Mono;
+
import static java.util.Collections.singletonList;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
@@ -417,6 +420,16 @@ public void testIsRunning() {
assertFalse(restClient.isRunning());
}
+ public void testStreamWithUnsupportedMethod() throws Exception {
+ try (RestClient restClient = createRestClient()) {
+ final UnsupportedOperationException ex = assertThrows(
+ UnsupportedOperationException.class,
+ () -> restClient.streamRequest(new StreamingRequest<>("unsupported", randomAsciiLettersOfLength(5), Mono.empty()))
+ );
+ assertEquals("http method not supported: unsupported", ex.getMessage());
+ }
+ }
+
private static void assertNodes(NodeTuple<List<Node>> nodeTuple, AtomicInteger lastNodeIndex, int runs) throws IOException {
int distance = lastNodeIndex.get() % nodeTuple.nodes.size();
/*
diff --git a/client/sniffer/licenses/jackson-core-2.17.1.jar.sha1 b/client/sniffer/licenses/jackson-core-2.17.1.jar.sha1
deleted file mode 100644
index 82dab5981e652..0000000000000
--- a/client/sniffer/licenses/jackson-core-2.17.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5e52a11644cd59a28ef79f02bddc2cc3bab45edb
\ No newline at end of file
diff --git a/client/sniffer/licenses/jackson-core-2.17.2.jar.sha1 b/client/sniffer/licenses/jackson-core-2.17.2.jar.sha1
new file mode 100644
index 0000000000000..e15f2340980bc
--- /dev/null
+++ b/client/sniffer/licenses/jackson-core-2.17.2.jar.sha1
@@ -0,0 +1 @@
+969a35cb35c86512acbadcdbbbfb044c877db814
\ No newline at end of file
diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle
index 161b8008525b4..792b1ab57ddbc 100644
--- a/distribution/archives/build.gradle
+++ b/distribution/archives/build.gradle
@@ -39,11 +39,17 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla
with libFiles()
}
into('config') {
- dirMode 0750
- fileMode 0660
+ dirPermissions {
+ unix 0750
+ }
+ filePermissions {
+ unix 0660
+ }
with configFiles(distributionType, java)
from {
- dirMode 0750
+ dirPermissions {
+ unix 0750
+ }
jvmOptionsDir.getParent()
}
}
@@ -61,13 +67,17 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla
}
into('') {
from {
- dirMode 0755
+ dirPermissions {
+ unix 0755
+ }
logsDir.getParent()
}
}
into('') {
from {
- dirMode 0755
+ dirPermissions {
+ unix 0755
+ }
pluginsDir.getParent()
}
}
diff --git a/distribution/build.gradle b/distribution/build.gradle
index bbbef40f51c57..a323dd15ed9cf 100644
--- a/distribution/build.gradle
+++ b/distribution/build.gradle
@@ -363,9 +363,9 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
if (it.relativePath.segments[-2] == 'bin' || (platform == 'darwin-x64' && it.relativePath.segments[-2] == 'MacOS')) {
// bin files, wherever they are within modules (eg platform specific) should be executable
// and MacOS is an alternative to bin on macOS
- it.mode = 0755
+ it.permissions(perm -> perm.unix(0755))
} else {
- it.mode = 0644
+ it.permissions(perm -> perm.unix(0644))
}
}
def buildModules = buildModulesTaskProvider
@@ -413,7 +413,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
from '../src/bin'
exclude '*.exe'
exclude '*.bat'
- eachFile { it.setMode(0755) }
+ eachFile { it.permissions(perm -> perm.unix(0755)) }
MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, java))
}
// windows files, only for zip
@@ -431,7 +431,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
}
// module provided bin files
with copySpec {
- eachFile { it.setMode(0755) }
+ eachFile { it.permissions(perm -> perm.unix(0755)) }
from project(':distribution').buildBin
if (distributionType != 'zip') {
exclude '*.bat'
@@ -473,7 +473,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
}
eachFile { FileCopyDetails details ->
if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') {
- details.mode = 0755
+ details.permissions(perm -> perm.unix(0755))
}
if (details.name == 'src.zip') {
details.exclude()
@@ -501,7 +501,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
}
eachFile { FileCopyDetails details ->
if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') {
- details.mode = 0755
+ details.permissions(perm -> perm.unix(0755))
}
}
}
diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle
index 49319789e04e9..659b25129b23c 100644
--- a/distribution/packages/build.gradle
+++ b/distribution/packages/build.gradle
@@ -63,7 +63,7 @@ import java.util.regex.Pattern
*/
plugins {
- id "com.netflix.nebula.ospackage-base" version "11.9.0"
+ id "com.netflix.nebula.ospackage-base" version "11.10.0"
}
void addProcessFilesTask(String type, boolean jdk) {
@@ -160,7 +160,9 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) {
}
from(rootProject.projectDir) {
include 'README.md'
- fileMode 0644
+ filePermissions {
+ unix 0644
+ }
}
into('lib') {
with libFiles()
@@ -183,9 +185,9 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) {
directory('/' + segments[0..i].join('/'), 0755)
}
if (segments[-2] == 'bin' || segments[-1] == 'jspawnhelper') {
- fcp.mode = 0755
+ fcp.permissions(perm -> perm.unix(0755))
} else {
- fcp.mode = 0644
+ fcp.permissions(perm -> perm.unix(0644))
}
}
}
@@ -195,7 +197,9 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) {
if (type == 'deb') {
into("/usr/share/doc/${packageName}") {
from "${packagingFiles}/copyright"
- fileMode 0644
+ filePermissions {
+ unix 0644
+ }
}
} else {
assert type == 'rpm'
@@ -204,7 +208,9 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) {
include 'APACHE-LICENSE-2.0.txt'
rename { 'LICENSE.txt' }
}
- fileMode 0644
+ filePermissions {
+ unix 0644
+ }
}
}
@@ -213,7 +219,9 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) {
configurationFile '/etc/opensearch/jvm.options'
configurationFile '/etc/opensearch/log4j2.properties'
from("${packagingFiles}") {
- dirMode 0750
+ dirPermissions {
+ unix 0750
+ }
into('/etc')
permissionGroup 'opensearch'
includeEmptyDirs true
@@ -223,8 +231,12 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) {
}
from("${packagingFiles}/etc/opensearch") {
into('/etc/opensearch')
- dirMode 0750
- fileMode 0660
+ dirPermissions {
+ unix 0750
+ }
+ filePermissions {
+ unix 0660
+ }
permissionGroup 'opensearch'
includeEmptyDirs true
createDirectoryEntry true
@@ -235,34 +247,46 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) {
into(new File(envFile).getParent()) {
fileType CONFIG | NOREPLACE
permissionGroup 'opensearch'
- fileMode 0660
+ filePermissions {
+ unix 0660
+ }
from "${packagingFiles}/env/opensearch"
}
// ========= systemd =========
into('/usr/lib/tmpfiles.d') {
from "${packagingFiles}/systemd/opensearch.conf"
- fileMode 0644
+ filePermissions {
+ unix 0644
+ }
}
into('/usr/lib/systemd/system') {
fileType CONFIG | NOREPLACE
from "${packagingFiles}/systemd/opensearch.service"
- fileMode 0644
+ filePermissions {
+ unix 0644
+ }
}
into('/usr/lib/sysctl.d') {
fileType CONFIG | NOREPLACE
from "${packagingFiles}/systemd/sysctl/opensearch.conf"
- fileMode 0644
+ filePermissions {
+ unix 0644
+ }
}
into('/usr/share/opensearch/bin') {
from "${packagingFiles}/systemd/systemd-entrypoint"
- fileMode 0755
+ filePermissions {
+ unix 0755
+ }
}
// ========= sysV init =========
configurationFile '/etc/init.d/opensearch'
into('/etc/init.d') {
- fileMode 0750
+ filePermissions {
+ unix 0750
+ }
fileType CONFIG | NOREPLACE
from "${packagingFiles}/init.d/opensearch"
}
@@ -278,7 +302,9 @@ Closure commonPackageConfig(String type, boolean jdk, String architecture) {
createDirectoryEntry true
user u
permissionGroup g
- dirMode mode
+ dirPermissions {
+ unix mode
+ }
}
}
copyEmptyDir('/var/log/opensearch', 'opensearch', 'opensearch', 0750)
@@ -341,7 +367,9 @@ Closure commonDebConfig(boolean jdk, String architecture) {
into('/usr/share/lintian/overrides') {
from('src/deb/lintian/opensearch')
- fileMode 0644
+ filePermissions {
+ unix 0644
+ }
}
}
}
diff --git a/distribution/src/config/opensearch.yml b/distribution/src/config/opensearch.yml
index 10bab9b3fce92..ce8d9079049e5 100644
--- a/distribution/src/config/opensearch.yml
+++ b/distribution/src/config/opensearch.yml
@@ -125,3 +125,7 @@ ${path.logs}
# Gates the functionality of enabling Opensearch to use pluggable caches with respective store names via setting.
#
#opensearch.experimental.feature.pluggable.caching.enabled: false
+#
+# Gates the functionality of star tree index, which improves the performance of search aggregations.
+#
+#opensearch.experimental.feature.composite_index.star_tree.enabled: false
diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle
index 3083ad4375460..784cdc457a1a9 100644
--- a/distribution/tools/plugin-cli/build.gradle
+++ b/distribution/tools/plugin-cli/build.gradle
@@ -37,8 +37,8 @@ base {
dependencies {
compileOnly project(":server")
compileOnly project(":libs:opensearch-cli")
- api "org.bouncycastle:bcpg-fips:1.0.7.1"
- api "org.bouncycastle:bc-fips:1.0.2.5"
+ api "org.bouncycastle:bcpg-fips:2.0.9"
+ api "org.bouncycastle:bc-fips:2.0.0"
testImplementation project(":test:framework")
testImplementation 'com.google.jimfs:jimfs:1.3.0'
testRuntimeOnly("com.google.guava:guava:${versions.guava}") {
@@ -58,33 +58,6 @@ test {
jvmArgs += [ "-Djava.security.egd=file:/dev/urandom" ]
}
-/*
- * these two classes intentionally use the following JDK internal APIs in order to offer the necessary
- * functionality
- *
- * sun.security.internal.spec.TlsKeyMaterialParameterSpec
- * sun.security.internal.spec.TlsKeyMaterialSpec
- * sun.security.internal.spec.TlsMasterSecretParameterSpec
- * sun.security.internal.spec.TlsPrfParameterSpec
- * sun.security.internal.spec.TlsRsaPremasterSecretParameterSpec
- * sun.security.provider.SecureRandom
- *
- */
-thirdPartyAudit.ignoreViolations(
- 'org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider$CoreSecureRandom',
- 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF',
- 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$BaseTLSKeyGeneratorSpi',
- 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSKeyMaterialGenerator',
- 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSKeyMaterialGenerator$2',
- 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSMasterSecretGenerator',
- 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSMasterSecretGenerator$2',
- 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSPRFKeyGenerator',
- 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSRsaPreMasterSecretGenerator',
- 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSRsaPreMasterSecretGenerator$2',
- 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSExtendedMasterSecretGenerator',
- 'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSExtendedMasterSecretGenerator$2'
-)
-
thirdPartyAudit.ignoreMissingClasses(
'org.brotli.dec.BrotliInputStream',
'org.objectweb.asm.AnnotationVisitor',
diff --git a/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.5.jar.sha1 b/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.5.jar.sha1
deleted file mode 100644
index 1b44c77dd4ee1..0000000000000
--- a/distribution/tools/plugin-cli/licenses/bc-fips-1.0.2.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-704e65f7e4fe679e5ab2aa8a840f27f8ced4c522
\ No newline at end of file
diff --git a/distribution/tools/plugin-cli/licenses/bc-fips-2.0.0.jar.sha1 b/distribution/tools/plugin-cli/licenses/bc-fips-2.0.0.jar.sha1
new file mode 100644
index 0000000000000..79f0e3e9930bb
--- /dev/null
+++ b/distribution/tools/plugin-cli/licenses/bc-fips-2.0.0.jar.sha1
@@ -0,0 +1 @@
+ee9ac432cf08f9a9ebee35d7cf8a45f94959a7ab
\ No newline at end of file
diff --git a/distribution/tools/plugin-cli/licenses/bcpg-fips-1.0.7.1.jar.sha1 b/distribution/tools/plugin-cli/licenses/bcpg-fips-1.0.7.1.jar.sha1
deleted file mode 100644
index 44cebc7c92d87..0000000000000
--- a/distribution/tools/plugin-cli/licenses/bcpg-fips-1.0.7.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5e1952428655ea822066f86df2e3ecda8fa0ba2b
\ No newline at end of file
diff --git a/distribution/tools/plugin-cli/licenses/bcpg-fips-2.0.9.jar.sha1 b/distribution/tools/plugin-cli/licenses/bcpg-fips-2.0.9.jar.sha1
new file mode 100644
index 0000000000000..20cdbf6dc8aa8
--- /dev/null
+++ b/distribution/tools/plugin-cli/licenses/bcpg-fips-2.0.9.jar.sha1
@@ -0,0 +1 @@
+f69719ef8dbf34d5f906ce480496446b2fd2ae27
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.17.1.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.17.1.jar.sha1
deleted file mode 100644
index 4ceead1b7ae4f..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.17.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-fca7ef6192c9ad05d07bc50da991bf937a84af3a
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.17.2.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.17.2.jar.sha1
new file mode 100644
index 0000000000000..411e1d62459fd
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.17.2.jar.sha1
@@ -0,0 +1 @@
+147b7b9412ffff24339f8aba080b292448e08698
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.17.1.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.17.1.jar.sha1
deleted file mode 100644
index 7cf1ac1b60301..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.17.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0524dcbcccdde7d45a679dfc333e4763feb09079
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.17.2.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.17.2.jar.sha1
new file mode 100644
index 0000000000000..f2b4dbdc5decb
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.17.2.jar.sha1
@@ -0,0 +1 @@
+e6deb029e5901e027c129341fac39e515066b68c
\ No newline at end of file
diff --git a/gradle.properties b/gradle.properties
index 7c359ed2b652c..4e8c5b98116c1 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -22,7 +22,7 @@ org.gradle.jvmargs=-Xmx3g -XX:+HeapDumpOnOutOfMemoryError -Xss2m \
options.forkOptions.memoryMaximumSize=3g
# Disable Gradle Enterprise Gradle plugin's test retry
-systemProp.gradle.enterprise.testretry.enabled=false
+systemProp.develocity.testretry.enabled=false
# Disable duplicate project id detection
# See https://docs.gradle.org/current/userguide/upgrading_version_6.html#duplicate_project_names_may_cause_publication_to_fail
diff --git a/gradle/ide.gradle b/gradle/ide.gradle
index 14d6b2982ccd0..ea353f8d92bdd 100644
--- a/gradle/ide.gradle
+++ b/gradle/ide.gradle
@@ -28,7 +28,7 @@ allprojects {
apply plugin: 'idea'
tasks.named('idea').configure {
- doFirst { throw new GradleException("Use of the 'idea' task has been deprecated. For details on importing into IntelliJ see CONTRIBUTING.md.") }
+ doFirst { throw new GradleException("Use of the 'idea' task has been deprecated. For details on importing into IntelliJ see DEVELOPER_GUIDE.md.") }
}
}
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
index d64cd4917707c..a4b76b9530d66 100644
Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 9b0d73222260e..39a291b258efb 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -11,7 +11,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.10-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionSha256Sum=194717442575a6f96e1c1befa2c30e9a4fc90f701d7aee33eb879b79e7ff05c0
+distributionSha256Sum=682b4df7fe5accdca84a4d1ef6a3a6ab096b3efd5edf7de2bd8c758d95a93703
diff --git a/gradlew b/gradlew
index 1aa94a4269074..f5feea6d6b116 100755
--- a/gradlew
+++ b/gradlew
@@ -15,6 +15,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+# SPDX-License-Identifier: Apache-2.0
+#
##############################################################################
#
@@ -55,7 +57,7 @@
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
-# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
+# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
@@ -84,7 +86,8 @@ done
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
-APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit
+APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s
+' "$PWD" ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
diff --git a/gradlew.bat b/gradlew.bat
index 6689b85beecde..9b42019c7915b 100644
--- a/gradlew.bat
+++ b/gradlew.bat
@@ -13,6 +13,8 @@
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
+@rem SPDX-License-Identifier: Apache-2.0
+@rem
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@@ -43,11 +45,11 @@ set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
-echo.
-echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
+echo. 1>&2
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
+echo. 1>&2
+echo Please set the JAVA_HOME variable in your environment to match the 1>&2
+echo location of your Java installation. 1>&2
goto fail
@@ -57,11 +59,11 @@ set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
-echo.
-echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
+echo. 1>&2
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
+echo. 1>&2
+echo Please set the JAVA_HOME variable in your environment to match the 1>&2
+echo location of your Java installation. 1>&2
goto fail
diff --git a/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java b/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java
index 569f48a8465f3..94ec0db3a9712 100644
--- a/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java
+++ b/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java
@@ -59,6 +59,7 @@ public class ApiAnnotationProcessor extends AbstractProcessor {
private static final String OPENSEARCH_PACKAGE = "org.opensearch";
private final Set<Element> reported = new HashSet<>();
+ private final Set<Element> validated = new HashSet<>();
private final Set<Element> processed = new HashSet<>();
private Kind reportFailureAs = Kind.ERROR;
@@ -85,6 +86,8 @@ public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment
);
for (var element : elements) {
+ validate(element);
+
if (!checkPackage(element)) {
continue;
}
@@ -100,6 +103,64 @@ public boolean process(Set extends TypeElement> annotations, RoundEnvironment
return false;
}
+ private void validate(Element element) {
+ // The element was validated already
+ if (validated.contains(element)) {
+ return;
+ }
+
+ validated.add(element);
+
+ final PublicApi publicApi = element.getAnnotation(PublicApi.class);
+ if (publicApi != null) {
+ if (!validateVersion(publicApi.since())) {
+ processingEnv.getMessager()
+ .printMessage(
+ reportFailureAs,
+ "The type " + element + " has @PublicApi annotation with unparseable OpenSearch version: " + publicApi.since()
+ );
+ }
+ }
+
+ final DeprecatedApi deprecatedApi = element.getAnnotation(DeprecatedApi.class);
+ if (deprecatedApi != null) {
+ if (!validateVersion(deprecatedApi.since())) {
+ processingEnv.getMessager()
+ .printMessage(
+ reportFailureAs,
+ "The type "
+ + element
+ + " has @DeprecatedApi annotation with unparseable OpenSearch version: "
+ + deprecatedApi.since()
+ );
+ }
+ }
+ }
+
+ private boolean validateVersion(String version) {
+ String[] parts = version.split("[.-]");
+ if (parts.length < 3 || parts.length > 4) {
+ return false;
+ }
+
+ int major = Integer.parseInt(parts[0]);
+ if (major > 3 || major < 0) {
+ return false;
+ }
+
+ int minor = Integer.parseInt(parts[1]);
+ if (minor < 0) {
+ return false;
+ }
+
+ int patch = Integer.parseInt(parts[2]);
+ if (patch < 0) {
+ return false;
+ }
+
+ return true;
+ }
+
/**
* Check top level executable element
* @param executable top level executable element
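To make the accepted since-version shapes concrete, a standalone sketch that mirrors the validateVersion logic above; the class name and harness are illustrative only:

    public class SinceVersionDemo {
        static boolean validateVersion(String version) {
            String[] parts = version.split("[.-]");
            if (parts.length < 3 || parts.length > 4) {
                return false;
            }
            // a non-numeric major/minor/patch segment throws NumberFormatException, as in the processor
            int major = Integer.parseInt(parts[0]);
            int minor = Integer.parseInt(parts[1]);
            int patch = Integer.parseInt(parts[2]);
            return major >= 0 && major <= 3 && minor >= 0 && patch >= 0;
        }

        public static void main(String[] args) {
            System.out.println(validateVersion("2.15.0"));       // true: three numeric segments
            System.out.println(validateVersion("3.0.0-alpha1")); // true: a fourth, pre-release segment is not parsed
            System.out.println(validateVersion("2.x"));          // false: too few segments (the PublicApiAnnotatedUnparseable case)
            System.out.println(validateVersion("4.0.0"));        // false: major versions above 3 are rejected
        }
    }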
diff --git a/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java b/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java
index 8d8a4c7895339..716dcc3b9015f 100644
--- a/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java
+++ b/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java
@@ -473,4 +473,48 @@ public void testPublicApiWithProtectedInterface() {
assertThat(failure.diagnotics(), not(hasItem(matching(Diagnostic.Kind.ERROR))));
}
+
+ /**
+ * The constructor arguments have relaxed semantics at the moment: they may be left unannotated or annotated as {@link InternalApi}
+ */
+ public void testPublicApiConstructorAnnotatedInternalApi() {
+ final CompilerResult result = compile("PublicApiConstructorAnnotatedInternalApi.java", "NotAnnotated.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(2));
+
+ assertThat(failure.diagnotics(), not(hasItem(matching(Diagnostic.Kind.ERROR))));
+ }
+
+ public void testPublicApiUnparseableVersion() {
+ final CompilerResult result = compile("PublicApiAnnotatedUnparseable.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(3));
+
+ assertThat(
+ failure.diagnotics(),
+ hasItem(
+ matching(
+ Diagnostic.Kind.ERROR,
+ containsString(
+ "The type org.opensearch.common.annotation.processor.PublicApiAnnotatedUnparseable has @PublicApi annotation with unparseable OpenSearch version: 2.x"
+ )
+ )
+ )
+ );
+ }
+
+ public void testPublicApiWithDeprecatedApiMethod() {
+ final CompilerResult result = compile("PublicApiWithDeprecatedApiMethod.java");
+ assertThat(result, instanceOf(Failure.class));
+
+ final Failure failure = (Failure) result;
+ assertThat(failure.diagnotics(), hasSize(2));
+
+ assertThat(failure.diagnotics(), not(hasItem(matching(Diagnostic.Kind.ERROR))));
+ }
+
}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/InternalApiAnnotated.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/InternalApiAnnotated.java
index 9996ba8b736aa..b0b542e127285 100644
--- a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/InternalApiAnnotated.java
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/InternalApiAnnotated.java
@@ -8,9 +8,9 @@
package org.opensearch.common.annotation.processor;
-import org.opensearch.common.annotation.PublicApi;
+import org.opensearch.common.annotation.InternalApi;
-@PublicApi(since = "1.0.0")
+@InternalApi
public class InternalApiAnnotated {
}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiAnnotatedUnparseable.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiAnnotatedUnparseable.java
new file mode 100644
index 0000000000000..44779450c9fd1
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiAnnotatedUnparseable.java
@@ -0,0 +1,16 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "2.x")
+public class PublicApiAnnotatedUnparseable {
+
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiConstructorAnnotatedInternalApi.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiConstructorAnnotatedInternalApi.java
new file mode 100644
index 0000000000000..d355a6b770391
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiConstructorAnnotatedInternalApi.java
@@ -0,0 +1,21 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.InternalApi;
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiConstructorAnnotatedInternalApi {
+ /**
+ * The constructors have relaxed semantics at the moment: they may be left unannotated or annotated as {@link InternalApi}
+ */
+ @InternalApi
+ public PublicApiConstructorAnnotatedInternalApi(NotAnnotated arg) {}
+}
diff --git a/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithDeprecatedApiMethod.java b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithDeprecatedApiMethod.java
new file mode 100644
index 0000000000000..3cb28d3360830
--- /dev/null
+++ b/libs/common/src/test/resources/org/opensearch/common/annotation/processor/PublicApiWithDeprecatedApiMethod.java
@@ -0,0 +1,20 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.annotation.processor;
+
+import org.opensearch.common.annotation.DeprecatedApi;
+import org.opensearch.common.annotation.PublicApi;
+
+@PublicApi(since = "1.0.0")
+public class PublicApiWithDeprecatedApiMethod {
+ @DeprecatedApi(since = "0.1.0")
+ public void method() {
+
+ }
+}
diff --git a/libs/core/licenses/jackson-core-2.17.1.jar.sha1 b/libs/core/licenses/jackson-core-2.17.1.jar.sha1
deleted file mode 100644
index 82dab5981e652..0000000000000
--- a/libs/core/licenses/jackson-core-2.17.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5e52a11644cd59a28ef79f02bddc2cc3bab45edb
\ No newline at end of file
diff --git a/libs/core/licenses/jackson-core-2.17.2.jar.sha1 b/libs/core/licenses/jackson-core-2.17.2.jar.sha1
new file mode 100644
index 0000000000000..e15f2340980bc
--- /dev/null
+++ b/libs/core/licenses/jackson-core-2.17.2.jar.sha1
@@ -0,0 +1 @@
+969a35cb35c86512acbadcdbbbfb044c877db814
\ No newline at end of file
diff --git a/libs/core/licenses/lucene-core-9.10.0.jar.sha1 b/libs/core/licenses/lucene-core-9.10.0.jar.sha1
deleted file mode 100644
index 31b6bcd29f418..0000000000000
--- a/libs/core/licenses/lucene-core-9.10.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-64e5624754d59386be5d9159c68f81ff96298704
\ No newline at end of file
diff --git a/libs/core/licenses/lucene-core-9.11.1.jar.sha1 b/libs/core/licenses/lucene-core-9.11.1.jar.sha1
new file mode 100644
index 0000000000000..82de964a2b755
--- /dev/null
+++ b/libs/core/licenses/lucene-core-9.11.1.jar.sha1
@@ -0,0 +1 @@
+8f52ba14b21774f41ce33cf5ca111cbdefeed7f9
\ No newline at end of file
diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java
index be34553aa214c..4ab6542654f05 100644
--- a/libs/core/src/main/java/org/opensearch/Version.java
+++ b/libs/core/src/main/java/org/opensearch/Version.java
@@ -97,6 +97,9 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final Version V_1_3_15 = new Version(1031599, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_1_3_16 = new Version(1031699, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_1_3_17 = new Version(1031799, org.apache.lucene.util.Version.LUCENE_8_10_1);
+ public static final Version V_1_3_18 = new Version(1031899, org.apache.lucene.util.Version.LUCENE_8_10_1);
+ public static final Version V_1_3_19 = new Version(1031999, org.apache.lucene.util.Version.LUCENE_8_10_1);
+ public static final Version V_1_3_20 = new Version(1032099, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_2_0_0 = new Version(2000099, org.apache.lucene.util.Version.LUCENE_9_1_0);
public static final Version V_2_0_1 = new Version(2000199, org.apache.lucene.util.Version.LUCENE_9_1_0);
public static final Version V_2_0_2 = new Version(2000299, org.apache.lucene.util.Version.LUCENE_9_1_0);
@@ -132,7 +135,12 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final Version V_2_14_0 = new Version(2140099, org.apache.lucene.util.Version.LUCENE_9_10_0);
public static final Version V_2_14_1 = new Version(2140199, org.apache.lucene.util.Version.LUCENE_9_10_0);
public static final Version V_2_15_0 = new Version(2150099, org.apache.lucene.util.Version.LUCENE_9_10_0);
- public static final Version CURRENT = V_2_15_0;
+ public static final Version V_2_15_1 = new Version(2150199, org.apache.lucene.util.Version.LUCENE_9_10_0);
+ public static final Version V_2_16_0 = new Version(2160099, org.apache.lucene.util.Version.LUCENE_9_11_1);
+ public static final Version V_2_16_1 = new Version(2160199, org.apache.lucene.util.Version.LUCENE_9_11_1);
+ public static final Version V_2_17_0 = new Version(2170099, org.apache.lucene.util.Version.LUCENE_9_11_1);
+ public static final Version V_2_18_0 = new Version(2180099, org.apache.lucene.util.Version.LUCENE_9_11_1);
+ public static final Version CURRENT = V_2_18_0;
public static Version fromId(int id) {
final Version known = LegacyESVersion.idToVersion.get(id);
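The new constants follow the existing id scheme. As a sketch, the encoding implied by the values above (the helper name is illustrative):

    // id = major * 1_000_000 + minor * 10_000 + revision * 100 + build (99 for releases)
    static int versionId(int major, int minor, int revision, int build) {
        return major * 1_000_000 + minor * 10_000 + revision * 100 + build;
    }

    // versionId(2, 16, 0, 99) == 2160099, matching V_2_16_0
    // versionId(1, 3, 20, 99) == 1032099, matching V_1_3_20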
diff --git a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java
index b7599265aece3..cac8ddc8f94e3 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java
@@ -633,7 +633,7 @@ public final <K, V> void writeMapOfLists(final Map<K, List<V>> map, final Writer
* @param keyWriter The key writer
* @param valueWriter The value writer
*/
- public final <K, V> void writeMap(final Map<K, V> map, final Writer<K> keyWriter, final Writer<V> valueWriter) throws IOException {
+ public <K, V> void writeMap(final Map<K, V> map, final Writer<K> keyWriter, final Writer<V> valueWriter) throws IOException {
writeVInt(map.size());
for (final Map.Entry<K, V> entry : map.entrySet()) {
keyWriter.write(this, entry.getKey());
@@ -969,9 +969,13 @@ public <T extends Writeable> void writeOptionalArray(@Nullable T[] array) throws
}
public void writeOptionalWriteable(@Nullable Writeable writeable) throws IOException {
+ writeOptionalWriteable((out, writable) -> writable.writeTo(out), writeable);
+ }
+
+ public <T> void writeOptionalWriteable(final Writer<T> writer, @Nullable T writeable) throws IOException {
if (writeable != null) {
writeBoolean(true);
- writeable.writeTo(this);
+ writer.write(this, writeable);
} else {
writeBoolean(false);
}
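A hedged usage sketch of the new Writer-based overload; BytesStreamOutput and the two-argument TaskResourceUsage constructor are assumed from the OpenSearch common and core libraries:

    import org.opensearch.common.io.stream.BytesStreamOutput;
    import org.opensearch.core.tasks.resourcetracker.TaskResourceUsage;

    try (BytesStreamOutput out = new BytesStreamOutput()) {
        TaskResourceUsage usage = new TaskResourceUsage(100L, 2048L); // cpu nanos, memory bytes (assumed ctor)
        // Same wire format as out.writeOptionalWriteable(usage), but the Writer controls serialization:
        out.writeOptionalWriteable((stream, value) -> value.writeTo(stream), usage);

        TaskResourceUsage absent = null;
        out.writeOptionalWriteable((stream, value) -> value.writeTo(stream), absent); // writes a single "false" byte
    }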
diff --git a/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java b/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java
index af09a7aebba79..711f56c9f3e3b 100644
--- a/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java
+++ b/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java
@@ -78,6 +78,19 @@ public static Compressor compressor(final BytesReference bytes) {
return null;
}
+ /**
+ * @param bytes The bytes to check the compression for
+ * @return The detected compressor, or the NoneCompressor if no compression is detected.
+ */
+ public static Compressor compressorForWritable(final BytesReference bytes) {
+ for (Compressor compressor : registeredCompressors.values()) {
+ if (compressor.isCompressed(bytes) == true) {
+ return compressor;
+ }
+ }
+ return CompressorRegistry.none();
+ }
+
/** Decompress the provided {@link BytesReference}. */
public static BytesReference uncompress(BytesReference bytes) throws IOException {
Compressor compressor = compressor(bytes);
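For contrast with compressor(bytes), which may return null, a sketch of the new helper; BytesArray is used here as a stand-in payload:

    import org.opensearch.core.common.bytes.BytesArray;
    import org.opensearch.core.common.bytes.BytesReference;
    import org.opensearch.core.compress.Compressor;

    BytesReference raw = new BytesArray("{\"field\":\"value\"}");
    // Plain JSON carries no compression header, so this yields the NoneCompressor rather than null
    Compressor compressor = CompressorRegistry.compressorForWritable(raw);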
diff --git a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java
index a278b61894a65..e7b51c3389b52 100644
--- a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java
+++ b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java
@@ -104,6 +104,10 @@ public long getTotalValue() {
return endValue.get() - startValue;
}
+ public long getStartValue() {
+ return startValue;
+ }
+
@Override
public String toString() {
return String.valueOf(getTotalValue());
diff --git a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceInfo.java b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceInfo.java
new file mode 100644
index 0000000000000..373cdbfa7e9a1
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceInfo.java
@@ -0,0 +1,225 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.core.tasks.resourcetracker;
+
+import org.opensearch.common.annotation.PublicApi;
+import org.opensearch.core.ParseField;
+import org.opensearch.core.common.Strings;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.core.common.io.stream.Writeable;
+import org.opensearch.core.xcontent.ConstructingObjectParser;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import static org.opensearch.core.xcontent.ConstructingObjectParser.constructorArg;
+
+/**
+ * Task resource usage information, along with minimal identifying details about the task.
+ *
+ * Writeable TaskResourceInfo objects represent the resource usage of running
+ * tasks, which can be propagated to the coordinator node to infer
+ * query-level resource usage.
+ *
+ * @opensearch.api
+ */
+@PublicApi(since = "2.15.0")
+public class TaskResourceInfo implements Writeable, ToXContentObject {
+ private final String action;
+ private final long taskId;
+ private final long parentTaskId;
+ private final String nodeId;
+ private final TaskResourceUsage taskResourceUsage;
+
+ private static final ParseField ACTION = new ParseField("action");
+ private static final ParseField TASK_ID = new ParseField("taskId");
+ private static final ParseField PARENT_TASK_ID = new ParseField("parentTaskId");
+ private static final ParseField NODE_ID = new ParseField("nodeId");
+ private static final ParseField TASK_RESOURCE_USAGE = new ParseField("taskResourceUsage");
+
+ public TaskResourceInfo(
+ final String action,
+ final long taskId,
+ final long parentTaskId,
+ final String nodeId,
+ final TaskResourceUsage taskResourceUsage
+ ) {
+ this.action = action;
+ this.taskId = taskId;
+ this.parentTaskId = parentTaskId;
+ this.nodeId = nodeId;
+ this.taskResourceUsage = taskResourceUsage;
+ }
+
+ public static final ConstructingObjectParser<TaskResourceInfo, Void> PARSER = new ConstructingObjectParser<>(
+ "task_resource_info",
+ a -> new Builder().setAction((String) a[0])
+ .setTaskId((Long) a[1])
+ .setParentTaskId((Long) a[2])
+ .setNodeId((String) a[3])
+ .setTaskResourceUsage((TaskResourceUsage) a[4])
+ .build()
+ );
+
+ static {
+ PARSER.declareString(constructorArg(), ACTION);
+ PARSER.declareLong(constructorArg(), TASK_ID);
+ PARSER.declareLong(constructorArg(), PARENT_TASK_ID);
+ PARSER.declareString(constructorArg(), NODE_ID);
+ PARSER.declareObject(constructorArg(), TaskResourceUsage.PARSER, TASK_RESOURCE_USAGE);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(ACTION.getPreferredName(), this.action);
+ builder.field(TASK_ID.getPreferredName(), this.taskId);
+ builder.field(PARENT_TASK_ID.getPreferredName(), this.parentTaskId);
+ builder.field(NODE_ID.getPreferredName(), this.nodeId);
+ builder.startObject(TASK_RESOURCE_USAGE.getPreferredName());
+ this.taskResourceUsage.toXContent(builder, params);
+ builder.endObject();
+ builder.endObject();
+ return builder;
+ }
+
+ /**
+ * Builder for {@link TaskResourceInfo}
+ */
+ public static class Builder {
+ private TaskResourceUsage taskResourceUsage;
+ private String action;
+ private long taskId;
+ private long parentTaskId;
+ private String nodeId;
+
+ public Builder setTaskResourceUsage(final TaskResourceUsage taskResourceUsage) {
+ this.taskResourceUsage = taskResourceUsage;
+ return this;
+ }
+
+ public Builder setAction(final String action) {
+ this.action = action;
+ return this;
+ }
+
+ public Builder setTaskId(final long taskId) {
+ this.taskId = taskId;
+ return this;
+ }
+
+ public Builder setParentTaskId(final long parentTaskId) {
+ this.parentTaskId = parentTaskId;
+ return this;
+ }
+
+ public Builder setNodeId(final String nodeId) {
+ this.nodeId = nodeId;
+ return this;
+ }
+
+ public TaskResourceInfo build() {
+ return new TaskResourceInfo(action, taskId, parentTaskId, nodeId, taskResourceUsage);
+ }
+ }
+
+ /**
+ * Read task info from a stream.
+ *
+ * @param in StreamInput to read
+ * @return {@link TaskResourceInfo}
+ * @throws IOException IOException
+ */
+ public static TaskResourceInfo readFromStream(StreamInput in) throws IOException {
+ return new TaskResourceInfo.Builder().setAction(in.readString())
+ .setTaskId(in.readLong())
+ .setParentTaskId(in.readLong())
+ .setNodeId(in.readString())
+ .setTaskResourceUsage(TaskResourceUsage.readFromStream(in))
+ .build();
+ }
+
+ /**
+ * Get TaskResourceUsage
+ *
+ * @return taskResourceUsage
+ */
+ public TaskResourceUsage getTaskResourceUsage() {
+ return taskResourceUsage;
+ }
+
+ /**
+ * Get parent task id
+ *
+ * @return parent task id
+ */
+ public long getParentTaskId() {
+ return parentTaskId;
+ }
+
+ /**
+ * Get task id
+ * @return task id
+ */
+ public long getTaskId() {
+ return taskId;
+ }
+
+ /**
+ * Get node id
+ * @return node id
+ */
+ public String getNodeId() {
+ return nodeId;
+ }
+
+ /**
+ * Get task action
+ * @return task action
+ */
+ public String getAction() {
+ return action;
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(action);
+ out.writeLong(taskId);
+ out.writeLong(parentTaskId);
+ out.writeString(nodeId);
+ taskResourceUsage.writeTo(out);
+ }
+
+ @Override
+ public String toString() {
+ return Strings.toString(MediaTypeRegistry.JSON, this);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null || obj.getClass() != TaskResourceInfo.class) {
+ return false;
+ }
+ TaskResourceInfo other = (TaskResourceInfo) obj;
+ return action.equals(other.action)
+ && taskId == other.taskId
+ && parentTaskId == other.parentTaskId
+ && Objects.equals(nodeId, other.nodeId)
+ && taskResourceUsage.equals(other.taskResourceUsage);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(action, taskId, parentTaskId, nodeId, taskResourceUsage);
+ }
+}
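A minimal sketch of assembling one of these records via the Builder above; all values are illustrative, and the TaskResourceUsage constructor arguments are assumed to be (cpuTimeInNanos, memoryInBytes):

    TaskResourceInfo info = new TaskResourceInfo.Builder()
        .setAction("indices:data/read/search")
        .setTaskId(42L)
        .setParentTaskId(-1L)
        .setNodeId("node-1")
        .setTaskResourceUsage(new TaskResourceUsage(100L, 2048L))
        .build();
    // info.toString() renders the JSON form produced by toXContent()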
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/AbstractXContentParser.java b/libs/core/src/main/java/org/opensearch/core/xcontent/AbstractXContentParser.java
index 4efaacecd0e67..4605aa684db1c 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/AbstractXContentParser.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/AbstractXContentParser.java
@@ -375,7 +375,7 @@ private static void skipToListStart(XContentParser parser) throws IOException {
}
}
- // read a list without bounds checks, assuming the the current parser is always on an array start
+ // read a list without bounds checks, assuming the current parser is always on an array start
private static List