diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 219b242a..100c210f 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,2 +1,2 @@ # This should match the owning team set up in https://github.com/orgs/opensearch-project/teams -* @opensearch-project/job-scheduler \ No newline at end of file +* @joshpalis @saratvemulapalli @dbwiddis @kaituo @vibrantvarun @cwperks @prudhvigodithi diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..f8881e1f --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,18 @@ +version: 2 +updates: + - package-ecosystem: "gradle" + directory: "/" + schedule: + interval: "weekly" + commit-message: + prefix: "dependabot:" + ignore: + # For all packages, ignore all major versions to minimize breaking issues + - dependency-name: "*" + update-types: ["version-update:semver-major"] + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + commit-message: + prefix: "dependabot:" diff --git a/.github/workflows/add-untriaged.yml b/.github/workflows/add-untriaged.yml index 15b9a556..864fd26d 100644 --- a/.github/workflows/add-untriaged.yml +++ b/.github/workflows/add-untriaged.yml @@ -8,7 +8,7 @@ jobs: apply-label: runs-on: ubuntu-latest steps: - - uses: actions/github-script@v6 + - uses: actions/github-script@v7 with: script: | github.rest.issues.addLabels({ diff --git a/.github/workflows/auto-release.yml b/.github/workflows/auto-release.yml new file mode 100644 index 00000000..86dae176 --- /dev/null +++ b/.github/workflows/auto-release.yml @@ -0,0 +1,28 @@ +name: Releases + +on: + push: + tags: + - '*' + +jobs: + build: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: GitHub App token + id: github_app_token + uses: tibdex/github-app-token@v2.1.0 + with: + app_id: ${{ secrets.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + installation_id: 22958780 + - name: Get tag + id: tag + uses: dawidd6/action-get-tag@v1 + - uses: actions/checkout@v4 + - uses: ncipollo/release-action@v1 + with: + github_token: ${{ steps.github_app_token.outputs.token }} + bodyFile: release-notes/opensearch.job-scheduler.release-notes-${{steps.tag.outputs.tag}}.md diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index e47d8d88..374d4d98 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -7,6 +7,7 @@ on: jobs: backport: + if: github.event.pull_request.merged == true runs-on: ubuntu-latest permissions: contents: write @@ -15,14 +16,15 @@ jobs: steps: - name: GitHub App token id: github_app_token - uses: tibdex/github-app-token@v1.5.0 + uses: tibdex/github-app-token@v2.1.0 with: app_id: ${{ secrets.APP_ID }} private_key: ${{ secrets.APP_PRIVATE_KEY }} installation_id: 22958780 - name: Backport - uses: VachaShah/backport@v1.1.4 + uses: VachaShah/backport@v2.2.0 with: github_token: ${{ steps.github_app_token.outputs.token }} - branch_name: backport/backport-${{ github.event.number }} + head_template: backport/backport-<%= number %>-to-<%= base %> + failure_labels: backport-failed diff --git a/.github/workflows/bwc-test-workflow.yml b/.github/workflows/bwc-test-workflow.yml index f38abb23..718974bc 100644 --- a/.github/workflows/bwc-test-workflow.yml +++ b/.github/workflows/bwc-test-workflow.yml @@ -25,7 +25,7 @@ jobs: java-version: ${{ matrix.java }} # This step uses the checkout Github action: https://github.com/actions/checkout - name: Checkout Branch - uses: actions/checkout@v2 + uses: 
actions/checkout@v4 - name: Run Job-scheduler Backwards Compatibility Tests run: | echo "Running backwards compatibility tests..." diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 34a0b467..4719543b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,28 +8,74 @@ on: - "*" jobs: - build: + Get-CI-Image-Tag: + uses: opensearch-project/opensearch-build/.github/workflows/get-ci-image-tag.yml@main + with: + product: opensearch + + build-job-scheduler-linux: strategy: fail-fast: false matrix: - os: [ubuntu-latest, windows-latest] - java: [11, 17] + java: [11, 17, 21] - # Job name - name: Build and Test - runs-on: ${{ matrix.os }} + name: Build job-scheduler Plugin on Linux using Container Image + runs-on: ubuntu-latest + needs: Get-CI-Image-Tag + container: + # using the same image which is used by opensearch-build team to build the OpenSearch Distribution + # this image tag is subject to change as more dependencies and updates will arrive over time + image: ${{ needs.Get-CI-Image-Tag.outputs.ci-image-version-linux }} + # need to switch to root so that github actions can install runner binary on container without permission issues. + options: --user root steps: - # This step uses the checkout Github action: https://github.com/actions/checkout - - name: Checkout + - name: Checkout job-scheduler uses: actions/checkout@v2 - # This step uses the setup-java Github action: https://github.com/actions/setup-java - name: Setup Java ${{ matrix.java }} uses: actions/setup-java@v1 with: java-version: ${{ matrix.java }} - - name: Build and Test + - name: Run build + # switching the user, as OpenSearch cluster can only be started as root/Administrator on linux-deb/linux-rpm/windows-zip. + run: | + chown -R 1000:1000 `pwd` + su `id -un 1000` -c "./gradlew build && ./gradlew publishToMavenLocal" + + - name: Upload Coverage Report + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + + - uses: actions/upload-artifact@v3 + if: always() + with: + name: linux-JDK${{ matrix.java }}-reports + path: | + ./build/reports/ + + + build-job-scheduler-MacOS: + strategy: + fail-fast: false + matrix: + java: [11, 17, 21] + + name: Build job-scheduler Plugin on MacOS + needs: Get-CI-Image-Tag + runs-on: macos-latest + + steps: + - name: Checkout job-scheduler + uses: actions/checkout@v2 + + - name: Setup Java ${{ matrix.java }} + uses: actions/setup-java@v1 + with: + java-version: ${{ matrix.java }} + + - name: Run build run: | ./gradlew build @@ -38,6 +84,53 @@ jobs: ./gradlew publishToMavenLocal - name: Upload Coverage Report - uses: codecov/codecov-action@v1 + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + + - uses: actions/upload-artifact@v3 + if: always() + with: + name: macos-JDK${{ matrix.java }}-reports + path: | + ./build/reports/ + + + build-job-scheduler-Windows: + strategy: + fail-fast: false + matrix: + java: [ 11, 17 ] + + name: Build job-scheduler Plugin on Windows + needs: Get-CI-Image-Tag + runs-on: windows-latest + + steps: + - name: Checkout job-scheduler + uses: actions/checkout@v2 + + - name: Setup Java ${{ matrix.java }} + uses: actions/setup-java@v1 + with: + java-version: ${{ matrix.java }} + + - name: Run build + run: | + ./gradlew.bat build + + - name: Publish to Maven Local + run: | + ./gradlew.bat publishToMavenLocal + + - name: Upload Coverage Report + uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} + + - uses: actions/upload-artifact@v3 + if: always() 
+ with: + name: windows-JDK${{ matrix.java }}-reports + path: | + ./build/reports/ diff --git a/.github/workflows/create-documentation-issue.yml b/.github/workflows/create-documentation-issue.yml index 3c0d2e5d..1522993d 100644 --- a/.github/workflows/create-documentation-issue.yml +++ b/.github/workflows/create-documentation-issue.yml @@ -14,14 +14,14 @@ jobs: steps: - name: GitHub App token id: github_app_token - uses: tibdex/github-app-token@v1.5.0 + uses: tibdex/github-app-token@v2.1.0 with: app_id: ${{ secrets.APP_ID }} private_key: ${{ secrets.APP_PRIVATE_KEY }} installation_id: 22958780 - name: Checkout code - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Edit the issue template run: | @@ -29,7 +29,7 @@ jobs: - name: Create Issue From File id: create-issue - uses: peter-evans/create-issue-from-file@v4 + uses: peter-evans/create-issue-from-file@v5 with: title: Add documentation related to new feature content-filepath: ./.github/ISSUE_TEMPLATE/documentation.md diff --git a/.github/workflows/dco.yml b/.github/workflows/dco.yml deleted file mode 100644 index 0a30f2f7..00000000 --- a/.github/workflows/dco.yml +++ /dev/null @@ -1,18 +0,0 @@ -name: Developer Certificate of Origin Check - -on: [pull_request] - -jobs: - check: - runs-on: ubuntu-latest - - steps: - - name: Get PR Commits - id: 'get-pr-commits' - uses: tim-actions/get-pr-commits@v1.1.0 - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: DCO Check - uses: tim-actions/dco@v1.1.0 - with: - commits: ${{ steps.get-pr-commits.outputs.commits }} diff --git a/.github/workflows/draft-release-notes-workflow.yml b/.github/workflows/draft-release-notes-workflow.yml index 5e67f9a5..8803ff99 100644 --- a/.github/workflows/draft-release-notes-workflow.yml +++ b/.github/workflows/draft-release-notes-workflow.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Update draft release notes - uses: release-drafter/release-drafter@v5 + uses: release-drafter/release-drafter@v6 with: config-name: draft-release-notes-config.yml name: Version (set here) diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml index 1eacb7b3..76e00195 100644 --- a/.github/workflows/links.yml +++ b/.github/workflows/links.yml @@ -9,7 +9,7 @@ jobs: linkchecker: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: lychee Link Checker id: lychee uses: lycheeverse/lychee-action@master diff --git a/.github/workflows/maven-publish.yml b/.github/workflows/maven-publish.yml index 3199c5cc..aabc13d4 100644 --- a/.github/workflows/maven-publish.yml +++ b/.github/workflows/maven-publish.yml @@ -5,8 +5,8 @@ on: push: branches: - main - - '1.3' - - 2.x + - '[0-9]+.[0-9]+' + - '[0-9]+.x' jobs: build-and-publish-snapshots: @@ -17,7 +17,7 @@ jobs: contents: write steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up JDK 11 uses: actions/setup-java@v3 with: @@ -25,7 +25,7 @@ jobs: java-version: 11 - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.PUBLISH_SNAPSHOTS_ROLE }} aws-region: us-east-1 @@ -36,4 +36,9 @@ jobs: export SONATYPE_PASSWORD=$(aws secretsmanager get-secret-value --secret-id maven-snapshots-password --query SecretString --output text) echo "::add-mask::$SONATYPE_USERNAME" echo "::add-mask::$SONATYPE_PASSWORD" - ./gradlew publishAllPublicationsToSnapshotsRepository + # For JS-SPI jar + ./gradlew publishShadowPublicationToSnapshotsRepository 
+ # For JS jar + ./gradlew publishNebulaPublicationToSnapshotsRepository + # For JS plugin zip + ./gradlew publishPluginZipPublicationToSnapshotsRepository diff --git a/.github/workflows/release-workflow.yml b/.github/workflows/release-workflow.yml index 10dc9936..e52c7748 100644 --- a/.github/workflows/release-workflow.yml +++ b/.github/workflows/release-workflow.yml @@ -17,7 +17,7 @@ jobs: steps: # This step uses the checkout Github action: https://github.com/actions/checkout - name: Checkout Branch - uses: actions/checkout@v2 + uses: actions/checkout@v4 # This step uses the setup-java Github action: https://github.com/actions/setup-java - name: Set Up JDK ${{ matrix.java }} uses: actions/setup-java@v1 @@ -41,7 +41,7 @@ jobs: # AWS authentication - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v1 + uses: aws-actions/configure-aws-credentials@v4 with: aws-access-key-id: ${{ secrets.AWS_STAGING_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_STAGING_SECRET_ACCESS_KEY }} @@ -74,7 +74,7 @@ jobs: # Creating release draft - name: Create Github Draft Release id: create_release - uses: actions/create-release@v1.0.0 + uses: actions/create-release@v1.1.4 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -85,7 +85,7 @@ jobs: # Upload the release with .zip as asset - name: Upload Release Asset - uses: actions/upload-release-asset@v1.0.1 + uses: actions/upload-release-asset@v1.0.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -96,7 +96,7 @@ jobs: # Upload the release with .rpm as asset - name: Upload Release Asset - uses: actions/upload-release-asset@v1.0.1 + uses: actions/upload-release-asset@v1.0.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -107,7 +107,7 @@ jobs: # Upload the release with .deb as asset - name: Upload Release Asset - uses: actions/upload-release-asset@v1.0.1 + uses: actions/upload-release-asset@v1.0.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -117,7 +117,7 @@ jobs: asset_content_type: application/zip - name: Upload Workflow Artifacts - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v3 with: name: job-scheduler-plugin path: job-scheduler-artifacts diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 9d419f0a..7eec4c77 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -4,10 +4,22 @@ This document contains a list of maintainers in this repo. 
See [opensearch-proje ## Current Maintainers -| Maintainer | GitHub ID | Affiliation | -| ---------------- | ------------------------------------------------- | ----------- | -| Ashish Agrawal | [lezzago](https://github.com/lezzago) | Amazon | -| Bowen Lan | [bowenlan-amzn](https://github.com/bowenlan-amzn) | Amazon | -| Drew Baugher | [dbbaughe](https://github.com/dbbaughe) | Amazon | -| Mohammad Qureshi | [qreshi](https://github.com/qreshi) | Amazon | -| Sriram Kosuri | [skkosuri-amzn](https://github.com/skkosuri-amzn) | Amazon | +| Maintainer | GitHub ID | Affiliation | +| ----------------- | ------------------------------------------------------- | ----------- | +| Josh Palis | [joshpalis](https://github.com/joshpalis) | Amazon | +| Sarat Vemulapalli | [saratvemulapalli](https://github.com/saratvemulapalli) | Amazon | +| Dan Widdis | [dbwiddis](https://github.com/dbwiddis) | Amazon | +| Kaituo Li | [kaituo](https://github.com/kaituo) | Amazon | +| Varun Jain | [vibrantvarun](https://github.com/vibrantvarun) | Amazon | +| Craig Perkins | [cwperks](https://github.com/cwperks) | Amazon | +| Prudhvi Godithi | [prudhvigodithi](https://github.com/prudhvigodithi) | Amazon | + +## Emeritus Maintainers + +| Maintainer | GitHub ID | Affiliation | +| ----------------- | ------------------------------------------------------- | ----------- | +| Ashish Agrawal | [lezzago](https://github.com/lezzago) | Amazon | +| Bowen Lan | [bowenlan-amzn](https://github.com/bowenlan-amzn) | Amazon | +| Drew Baugher | [dbbaughe](https://github.com/dbbaughe) | Amazon | +| Mohammad Qureshi | [qreshi](https://github.com/qreshi) | Amazon | +| Sriram Kosuri | [skkosuri-amzn](https://github.com/skkosuri-amzn) | Amazon | diff --git a/build.gradle b/build.gradle index 064fd5a4..4483aef9 100644 --- a/build.gradle +++ b/build.gradle @@ -24,9 +24,9 @@ buildscript { } plugins { - id 'nebula.ospackage' version "8.3.0" + id 'com.netflix.nebula.ospackage' version "11.9.1" id 'java-library' - id "com.diffplug.spotless" version "6.12.0" apply false + id "com.diffplug.spotless" version "6.25.0" } apply plugin: 'opensearch.opensearchplugin' @@ -150,15 +150,28 @@ repositories { maven { url "https://aws.oss.sonatype.org/content/repositories/snapshots" } } +configurations { + all { + resolutionStrategy { + force 'com.google.guava:guava:32.1.3-jre' + } + } +} + dependencies { implementation project(path: ":${rootProject.name}-spi", configuration: 'shadow') - implementation group: 'com.google.guava', name: 'guava', version:'31.0.1-jre' - implementation group: 'com.google.guava', name: 'failureaccess', version:'1.0.1' + implementation group: 'com.google.guava', name: 'guava', version:'32.1.3-jre' + implementation group: 'com.google.guava', name: 'failureaccess', version:'1.0.2' + testImplementation group: 'org.mockito', name: 'mockito-core', version: "${versions.mockito}" javaRestTestImplementation project.sourceSets.main.runtimeClasspath + //spotless + implementation('com.google.googlejavaformat:google-java-format:1.22.0') { + exclude group: 'com.google.guava' + } } // RPM & Debian build -apply plugin: 'nebula.ospackage' +apply plugin: 'com.netflix.nebula.ospackage' def es_tmp_dir = rootProject.file('build/private/es_tmp').absoluteFile es_tmp_dir.mkdirs() @@ -176,6 +189,8 @@ task integTest(type: RestIntegTestTask) { } tasks.named("check").configure { dependsOn(integTest) } 
+tasks.generatePomFileForPluginZipPublication.dependsOn publishNebulaPublicationToMavenLocal + integTest { if (project.hasProperty('excludeTests')) { project.properties['excludeTests']?.replaceAll('\\s', '')?.split('[,;]')?.each { @@ -303,9 +318,8 @@ afterEvaluate { task renameRpm(type: Copy) { from("$buildDir/distributions") into("$buildDir/distributions") - include archiveName - rename archiveName, "${packageName}-${version}.rpm" - doLast { delete file("$buildDir/distributions/$archiveName") } + rename "$archiveFileName", "${packageName}-${version}.rpm" + doLast { delete file("$buildDir/distributions/$archiveFileName") } } } @@ -316,9 +330,8 @@ afterEvaluate { task renameDeb(type: Copy) { from("$buildDir/distributions") into("$buildDir/distributions") - include archiveName - rename archiveName, "${packageName}-${version}.deb" - doLast { delete file("$buildDir/distributions/$archiveName") } + rename "$archiveFileName", "${packageName}-${version}.deb" + doLast { delete file("$buildDir/distributions/$archiveFileName") } } } } diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 7454180f..d64cd491 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index ae04661e..db8c3baa 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,8 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-bin.zip +distributionSha256Sum=9d926787066a081739e8200858338b4a69e837c3a821a33aca9db09dd4a41026 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-bin.zip +networkTimeout=10000 +validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew index 1b6c7873..1aa94a42 100755 --- a/gradlew +++ b/gradlew @@ -55,7 +55,7 @@ # Darwin, MinGW, and NonStop. # # (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt # within the Gradle project. # # You can find Gradle at https://github.com/gradle/gradle/. @@ -80,13 +80,11 @@ do esac done -APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit - -APP_NAME="Gradle" +# This is normally unused +# shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' +# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) +APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum @@ -133,22 +131,29 @@ location of your Java installation." fi else JAVACMD=java - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + if ! command -v java >/dev/null 2>&1 + then + die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the location of your Java installation." 
+ fi fi # Increase the maximum file descriptors if we can. if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then case $MAX_FD in #( max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 MAX_FD=$( ulimit -H -n ) || warn "Could not query maximum file descriptor limit" esac case $MAX_FD in #( '' | soft) :;; #( *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 ulimit -n "$MAX_FD" || warn "Could not set maximum file descriptor limit to $MAX_FD" esac @@ -193,11 +198,15 @@ if "$cygwin" || "$msys" ; then done fi -# Collect all arguments for the java command; -# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of -# shell script including quotes and variable substitutions, so put them in -# double quotes to make sure that they get re-expanded; and -# * put everything else in single quotes, so that it's not re-expanded. + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Collect all arguments for the java command: +# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, +# and any embedded shellness will be escaped. +# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be +# treated as '${Hostname}' itself on the command line. set -- \ "-Dorg.gradle.appname=$APP_BASE_NAME" \ @@ -205,6 +214,12 @@ set -- \ org.gradle.wrapper.GradleWrapperMain \ "$@" +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + # Use "xargs" to parse quoted args. # # With -n1 it outputs one arg per line, with the quotes and backslashes removed. diff --git a/gradlew.bat b/gradlew.bat old mode 100644 new mode 100755 index ac1b06f9..6689b85b --- a/gradlew.bat +++ b/gradlew.bat @@ -14,7 +14,7 @@ @rem limitations under the License. @rem -@if "%DEBUG%" == "" @echo off +@if "%DEBUG%"=="" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @@ -25,7 +25,8 @@ if "%OS%"=="Windows_NT" setlocal set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @@ -40,7 +41,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto execute +if %ERRORLEVEL% equ 0 goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. @@ -75,13 +76,15 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar :end @rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd +if %ERRORLEVEL% equ 0 goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! 
-if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% :mainEnd if "%OS%"=="Windows_NT" endlocal diff --git a/release-notes/opensearch-job-scheduler.release-notes-2.13.0.0.md b/release-notes/opensearch-job-scheduler.release-notes-2.13.0.0.md new file mode 100644 index 00000000..3e9817c3 --- /dev/null +++ b/release-notes/opensearch-job-scheduler.release-notes-2.13.0.0.md @@ -0,0 +1,10 @@ +## Version 2.13.0.0 + +Compatible with OpenSearch 2.13.0 + +### Maintenance +* Increment version to 2.13.0 ([#575](https://github.com/opensearch-project/job-scheduler/pull/575)) +* Spotless plugin remove apply false ([#590](https://github.com/opensearch-project/job-scheduler/pull/590)) [(#591)](https://github.com/opensearch-project/job-scheduler/pull/591) +* dependabot: bump com.google.googlejavaformat:google-java-format [(#596)](https://github.com/opensearch-project/job-scheduler/pull/596) [(#597)](https://github.com/opensearch-project/job-scheduler/pull/597) +* dependabot: bump com.netflix.nebula.ospackage from 11.8.0 to 11.8.1 [(#593)](https://github.com/opensearch-project/job-scheduler/pull/593) [(#598)](https://github.com/opensearch-project/job-scheduler/pull/598) +* dependabot: bump org.slf4j:slf4j-api from 2.0.11 to 2.0.12 [(#587)](https://github.com/opensearch-project/job-scheduler/pull/587) [(#599)](https://github.com/opensearch-project/job-scheduler/pull/599) diff --git a/release-notes/opensearch-job-scheduler.release-notes-2.14.0.0.md b/release-notes/opensearch-job-scheduler.release-notes-2.14.0.0.md new file mode 100644 index 00000000..f5914c9c --- /dev/null +++ b/release-notes/opensearch-job-scheduler.release-notes-2.14.0.0.md @@ -0,0 +1,13 @@ +## Version 2.14.0.0 + +Compatible with OpenSearch 2.14.0 + +### Maintenance +* Increment version to 2.14.0 ([#605](https://github.com/opensearch-project/job-scheduler/pull/605)) +* dependabot: bump com.google.googlejavaformat:google-java-format ([#608](https://github.com/opensearch-project/job-scheduler/pull/608)) +* dependabot: bump org.slf4j:slf4j-api from 2.0.12 to 2.0.13 [(#611)](https://github.com/opensearch-project/job-scheduler/pull/611) [(#614)](https://github.com/opensearch-project/job-scheduler/pull/614) +* dependabot: bump org.gradle.test-retry from 1.5.8 to 1.5.9 [(#618)](https://github.com/opensearch-project/job-scheduler/pull/618) [(#619)](https://github.com/opensearch-project/job-scheduler/pull/619) +* dependabot: bump com.netflix.nebula.ospackage from 11.8.1 to 11.9.0 [(#617)](https://github.com/opensearch-project/job-scheduler/pull/617) [(#620)](https://github.com/opensearch-project/job-scheduler/pull/620) + +### Infrastructure +* Improve the repo Code Coverage percentage [(#616)](https://github.com/opensearch-project/job-scheduler/pull/616) [(#621)](https://github.com/opensearch-project/job-scheduler/pull/621) diff --git a/release-notes/opensearch-job-scheduler.release-notes-2.15.0.0.md b/release-notes/opensearch-job-scheduler.release-notes-2.15.0.0.md new file mode 100644 index 00000000..7f35d006 --- /dev/null +++ b/release-notes/opensearch-job-scheduler.release-notes-2.15.0.0.md @@ -0,0 +1,11 @@ +## Version 2.14.0.0 + +Compatible with OpenSearch 2.14.0 + +### Maintenance +* Increment version to 2.15.0 
([#626](https://github.com/opensearch-project/job-scheduler/pull/626)). +* dependabot: bump com.netflix.nebula.ospackage from 11.9.0 to 11.9.1 [(#634)](https://github.com/opensearch-project/job-scheduler/pull/634) [(#635)](https://github.com/opensearch-project/job-scheduler/pull/635). + +### Infrastructure +* Refer to the version of Mockito from core's buildSrc/version.properties [(#630)](https://github.com/opensearch-project/job-scheduler/pull/630) [(#631)](https://github.com/opensearch-project/job-scheduler/pull/631). +* Codecov GitHub Action changed back to v3 [(#622)](https://github.com/opensearch-project/job-scheduler/pull/622) [(#623)](https://github.com/opensearch-project/job-scheduler/pull/623). diff --git a/release-notes/opensearch.job-scheduler.release-notes-2.10.0.0.md b/release-notes/opensearch.job-scheduler.release-notes-2.10.0.0.md new file mode 100644 index 00000000..95fded8d --- /dev/null +++ b/release-notes/opensearch.job-scheduler.release-notes-2.10.0.0.md @@ -0,0 +1,21 @@ +## Version 2.10.0.0 + +Compatible with OpenSearch 2.10.0 + +### Added +* Setting JobSweeper search preference against primary shard ([#483](https://github.com/opensearch-project/job-scheduler/pull/483)) ([#485](https://github.com/opensearch-project/job-scheduler/pull/485)) +* Converts .opendistro-job-scheduler-lock index into a system index ([#478](https://github.com/opensearch-project/job-scheduler/pull/478)) +* Public snapshots on all release branches ([#475](https://github.com/opensearch-project/job-scheduler/pull/475)) ([#476](https://github.com/opensearch-project/job-scheduler/pull/476)) + +### Fixed +* Call listner.onFailure when lock creation failed ([#435](https://github.com/opensearch-project/job-scheduler/pull/435)) ([#443](https://github.com/opensearch-project/job-scheduler/pull/443)) + +### Maintenance +* Update packages according to a change in OpenSearch core ([#422](https://github.com/opensearch-project/job-scheduler/pull/422)) ([#431](https://github.com/opensearch-project/job-scheduler/pull/431)) +* Xcontent changes to ODFERestTestCase ([#440](https://github.com/opensearch-project/job-scheduler/pull/440)) +* Update LifecycleListener import ([#445](https://github.com/opensearch-project/job-scheduler/pull/445)) +* Bump slf4j-api to 2.0.7, ospackage to 11.4.0, google-java-format to 1.17.0, guava to 32.1.2-jre and spotless to 6.20.0 ([#453](https://github.com/opensearch-project/job-scheduler/pull/453)) +* Fixing Strings import ([#459](https://github.com/opensearch-project/job-scheduler/pull/459)) +* bump com.cronutils:cron-utils from 9.2.0 to 9.2.1 ([#458](https://github.com/opensearch-project/job-scheduler/pull/458)) +* React to changes in ActionListener and ActionFuture ([#467](https://github.com/opensearch-project/job-scheduler/pull/467)) +* bump com.diffplug.spotless from 6.20.0 to 6.21.0 ([#484](https://github.com/opensearch-project/job-scheduler/pull/484)) \ No newline at end of file diff --git a/release-notes/opensearch.job-scheduler.release-notes-2.11.0.0.md b/release-notes/opensearch.job-scheduler.release-notes-2.11.0.0.md new file mode 100644 index 00000000..5755d1bb --- /dev/null +++ b/release-notes/opensearch.job-scheduler.release-notes-2.11.0.0.md @@ -0,0 +1,15 @@ +## Version 2.11.0.0 + +Compatible with OpenSearch 2.11.0 + +### Maintenance +* bump actions/upload-release-asset from 1.0.1 to 1.0.2 
([#504](https://github.com/opensearch-project/job-scheduler/pull/504))([#506](https://github.com/opensearch-project/job-scheduler/pull/506)) +* bump aws-actions/configure-aws-credentials from 1 to 4 ([#501](https://github.com/opensearch-project/job-scheduler/pull/501))([#507](https://github.com/opensearch-project/job-scheduler/pull/507)) +* bump com.netflix.nebula.ospackage from 11.4.0 to 11.5.0 ([#500](https://github.com/opensearch-project/job-scheduler/pull/500))([#508](https://github.com/opensearch-project/job-scheduler/pull/508)) +* manual backport of #503 ([#509](https://github.com/opensearch-project/job-scheduler/pull/509)) +* bump actions/create-release from 1.0.0 to 1.1.4 ([#514](https://github.com/opensearch-project/job-scheduler/pull/514))([#521](https://github.com/opensearch-project/job-scheduler/pull/521)) +* bump codecov/codecov-action from 1 to 3 ([#513](https://github.com/opensearch-project/job-scheduler/pull/513))([#520](https://github.com/opensearch-project/job-scheduler/pull/520)) +* bump actions/upload-artifact from 1 to 3 ([#512](https://github.com/opensearch-project/job-scheduler/pull/512)) ([#519](https://github.com/opensearch-project/job-scheduler/pull/519)) +* bump tibdex/github-app-token from 1.5.0 to 2.1.0 ([#511](https://github.com/opensearch-project/job-scheduler/pull/511))([#518](https://github.com/opensearch-project/job-scheduler/pull/518)) +* bump com.diffplug.spotless from 6.21.0 to 6.22.0 ([#510](https://github.com/opensearch-project/job-scheduler/pull/510))([#517](https://github.com/opensearch-project/job-scheduler/pull/517)) +* bump VachaShah/backport from 1.1.4 to 2.2.0 ([#515](https://github.com/opensearch-project/job-scheduler/pull/515))([#516](https://github.com/opensearch-project/job-scheduler/pull/516)) \ No newline at end of file diff --git a/release-notes/opensearch.job-scheduler.release-notes-2.12.0.0.md b/release-notes/opensearch.job-scheduler.release-notes-2.12.0.0.md new file mode 100644 index 00000000..67602d72 --- /dev/null +++ b/release-notes/opensearch.job-scheduler.release-notes-2.12.0.0.md @@ -0,0 +1,18 @@ +## Version 2.12.0.0 Release Notes + +Compatible with OpenSearch 2.12.0 + +### Maintenance +* Fix flaky tests ([#556](https://github.com/opensearch-project/job-scheduler/pull/556)). +* Use the build CI image in the Build and Test workflow ([#534](https://github.com/opensearch-project/job-scheduler/pull/534)). +* Upgrade gradle to 8.5 ([#545](https://github.com/opensearch-project/job-scheduler/pull/545)). +* Update release-drafter/release-drafter from 5 to 6 ([#567](https://github.com/opensearch-project/job-scheduler/pull/567)). +* Update peter-evans/create-issue-from-file from 4 to 5 ([#566](https://github.com/opensearch-project/job-scheduler/pull/566)). +* Update `org.slf4j:slf4j-api` from 2.0.7 to 2.0.11 ([#570](https://github.com/opensearch-project/job-scheduler/pull/570)). +* Update `com.google.googlejavaformat:google-java-format` from 1.17.0 to 1.19.2 ([#555](https://github.com/opensearch-project/job-scheduler/pull/555)). +* Update `com.google.guava:guava` from 32.1.2-jre to 32.1.3-jre ([#530](https://github.com/opensearch-project/job-scheduler/pull/530)). +* Update `com.google.guava:failureacces`s from 1.0.1 to 1.0.2 ([#532](https://github.com/opensearch-project/job-scheduler/pull/532)). 
+* Update `com.netflix.nebula.ospackage` from 11.5.0 to 11.6.0 ([#551](https://github.com/opensearch-project/job-scheduler/pull/551)). +* Update `com.diffplug.spotless` from 6.22.0 to 6.25.0 ([#558](https://github.com/opensearch-project/job-scheduler/pull/558)). +* Fix backport workflow ([#533](https://github.com/opensearch-project/job-scheduler/pull/533)). +* Enable `publishPluginZipPublicationToMavenLocal` gradle task to publish job-scheduler plugin zip to maven local ([#584](https://github.com/opensearch-project/job-scheduler/pull/584)). diff --git a/release-notes/opensearch.job-scheduler.release-notes-2.7.0.0.md b/release-notes/opensearch.job-scheduler.release-notes-2.7.0.0.md new file mode 100644 index 00000000..6f6b44cc --- /dev/null +++ b/release-notes/opensearch.job-scheduler.release-notes-2.7.0.0.md @@ -0,0 +1,13 @@ +## Version 2.7.0.0 2023-04-18 + +Compatible with OpenSearch 2.7.0 + +### Added +* Adding name and description to org.opensearch.opensearch-job-scheduler pom ([#338](https://github.com/opensearch-project/job-scheduler/pull/338)) +* Adding Decoupling snapshots ([#324](https://github.com/opensearch-project/job-scheduler/pull/324)) +* Adding generatePomFileForPluginZipPublication as dependency of publishNebulaPublicationToSnapshotsRepository ([#360](https://github.com/opensearch-project/job-scheduler/pull/360)) +* Adding groupId to pom section of build.gradle ([#363](https://github.com/opensearch-project/job-scheduler/pull/363)) + +### Maintenance +* Update to Gradle 8.0.2 ([#348](https://github.com/opensearch-project/job-scheduler/pull/348)) + diff --git a/release-notes/opensearch.job-scheduler.release-notes-2.8.0.0.md b/release-notes/opensearch.job-scheduler.release-notes-2.8.0.0.md new file mode 100644 index 00000000..8ac23d26 --- /dev/null +++ b/release-notes/opensearch.job-scheduler.release-notes-2.8.0.0.md @@ -0,0 +1,12 @@ +## Version 2.8.0.0 2023-06-06 + +Compatible with OpenSearch 2.8.0 + +### Added +* Add auto-release github workflow ([#385](https://github.com/opensearch-project/job-scheduler/pull/385)) + +### Maintenance +* Consuming breaking changes from moving ExtensionActionRequest ([#381](https://github.com/opensearch-project/job-scheduler/pull/381)) +* Fix the Maven publish ([#379](https://github.com/opensearch-project/job-scheduler/pull/379)) +* Fixes issue with publishing Job Scheduler artifacts to correct maven coordinates ([#377](https://github.com/opensearch-project/job-scheduler/pull/377)) +* Bumping JS main BWC test version for sample extension plugin to 2.8 ([#371](https://github.com/opensearch-project/job-scheduler/pull/371)) diff --git a/release-notes/opensearch.job-scheduler.release-notes-2.9.0.0.md b/release-notes/opensearch.job-scheduler.release-notes-2.9.0.0.md new file mode 100644 index 00000000..de817ca8 --- /dev/null +++ b/release-notes/opensearch.job-scheduler.release-notes-2.9.0.0.md @@ -0,0 +1,16 @@ +## Version 2.9.0.0 2023-07-24 + +Compatible with OpenSearch 2.9.0 + +### Added +* Add MacOS CI Workflow([#412](https://github.com/opensearch-project/job-scheduler/pull/412)) + +### Fixed +* Update job details mapping ([#391](https://github.com/opensearch-project/job-scheduler/pull/391)) +* Force DOCUMENT replication on lock index ([#417](https://github.com/opensearch-project/job-scheduler/pull/417)) +* Fix bug time out of range ([#409](https://github.com/opensearch-project/job-scheduler/pull/409)) +* 
Update packages according to a change in OpenSearch core ([#422](https://github.com/opensearch-project/job-scheduler/pull/422)) + +### Maintenance +* Upgrade gradle version to 8.1.1 ([#402](https://github.com/opensearch-project/job-scheduler/pull/402)) +* Upgrade guava from 31.0.1-jre to latest 32.0.1-jre ([#411](https://github.com/opensearch-project/job-scheduler/pull/411)) \ No newline at end of file diff --git a/sample-extension-plugin/build.gradle b/sample-extension-plugin/build.gradle index ff5d40d6..2d5a2659 100644 --- a/sample-extension-plugin/build.gradle +++ b/sample-extension-plugin/build.gradle @@ -9,6 +9,7 @@ apply plugin: 'opensearch.java-rest-test' import org.opensearch.gradle.test.RestIntegTestTask import org.opensearch.gradle.testclusters.StandaloneRestIntegTestTask +import org.apache.tools.ant.taskdefs.condition.Os import java.util.concurrent.Callable @@ -128,11 +129,12 @@ testClusters.integTest { } String baseName = "jobSchedulerBwcCluster" -String bwcOpenSearchVersion = "2.7.0" +String bwcOpenSearchVersion = "2.15.0" String bwcPluginVersion = bwcOpenSearchVersion + ".0" String bwcFilePath = "src/test/resources/bwc/job-scheduler/" +bwcOpenSearchVersion += "-SNAPSHOT" String bwcFileName = "opensearch-job-scheduler-" + bwcPluginVersion + ".zip" -String bwcDownloadUrl = "https://ci.opensearch.org/ci/dbc/distribution-build-opensearch/" + bwcOpenSearchVersion + "/latest/linux/x64/tar/builds/opensearch/plugins/" + bwcFileName +String bwcDownloadUrl = "https://aws.oss.sonatype.org/service/local/artifact/maven/redirect?r=snapshots&g=org.opensearch.plugin&a=opensearch-job-scheduler&v=$bwcPluginVersion-SNAPSHOT&p=zip" 2.times {i -> testClusters { @@ -278,7 +280,9 @@ task bwcTestSuite(type: StandaloneRestIntegTestTask) { dependsOn tasks.named("${baseName}#rollingUpgradeClusterTask") dependsOn tasks.named("${baseName}#fullRestartClusterTask") } -tasks.named("check").configure {dependsOn(bwcTestSuite)} +if (!(Os.isFamily(Os.FAMILY_MAC))) { + tasks.named("check").configure {dependsOn(bwcTestSuite)} +} run { doFirst { diff --git a/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionPlugin.java b/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionPlugin.java index b50c973c..94ff42de 100644 --- a/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionPlugin.java +++ b/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionPlugin.java @@ -18,14 +18,14 @@ import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.node.DiscoveryNodes; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.IndexScopedSettings; import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.SettingsFilter; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.XContentParserUtils; import org.opensearch.env.Environment; import org.opensearch.env.NodeEnvironment; import org.opensearch.plugins.ActionPlugin; diff --git 
a/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionRestHandler.java b/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionRestHandler.java index 2a559ec3..68dbfa05 100644 --- a/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionRestHandler.java +++ b/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionRestHandler.java @@ -10,7 +10,7 @@ import org.opensearch.action.support.WriteRequest; import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.action.ActionListener; +import org.opensearch.core.action.ActionListener; import org.opensearch.action.delete.DeleteRequest; import org.opensearch.action.delete.DeleteResponse; import org.opensearch.action.index.IndexRequest; @@ -21,7 +21,7 @@ import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestRequest; import org.opensearch.rest.RestResponse; -import org.opensearch.rest.RestStatus; +import org.opensearch.core.rest.RestStatus; import java.io.IOException; import java.time.Instant; diff --git a/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobRunner.java b/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobRunner.java index baef15d8..95114773 100644 --- a/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobRunner.java +++ b/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobRunner.java @@ -17,7 +17,7 @@ import org.opensearch.jobscheduler.spi.utils.LockService; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.action.ActionListener; +import org.opensearch.core.action.ActionListener; import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.service.ClusterService; import org.opensearch.plugins.Plugin; @@ -123,10 +123,9 @@ public void runJob(ScheduledJobParameter jobParameter, JobExecutionContext conte lockService.release( lock, - ActionListener.wrap( - released -> { log.info("Released lock for job {}", jobParameter.getName()); }, - exception -> { throw new IllegalStateException("Failed to release lock."); } - ) + ActionListener.wrap(released -> { log.info("Released lock for job {}", jobParameter.getName()); }, exception -> { + throw new IllegalStateException("Failed to release lock."); + }) ); }, exception -> { throw new IllegalStateException("Failed to acquire lock."); })); } diff --git a/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionIntegTestCase.java b/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionIntegTestCase.java index 564d67ea..3bca9556 100644 --- a/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionIntegTestCase.java +++ b/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionIntegTestCase.java @@ -23,7 +23,7 @@ import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; -import org.opensearch.rest.RestStatus; +import org.opensearch.core.rest.RestStatus; import org.opensearch.test.rest.OpenSearchRestTestCase; import java.io.IOException; diff --git a/settings.gradle 
b/settings.gradle index 89529542..7ff6686f 100644 --- a/settings.gradle +++ b/settings.gradle @@ -10,4 +10,4 @@ project(":spi").name = rootProject.name + "-spi" include "sample-extension-plugin" project(":sample-extension-plugin").name = rootProject.name + "-sample-extension" -startParameter.excludedTaskNames=["publishPluginZipPublicationToMavenLocal", "publishPluginZipPublicationToStagingRepository"] +startParameter.excludedTaskNames=["publishPluginZipPublicationToStagingRepository"] diff --git a/spi/build.gradle b/spi/build.gradle index ff8dccb8..51b18d06 100644 --- a/spi/build.gradle +++ b/spi/build.gradle @@ -11,6 +11,7 @@ plugins { id 'jacoco' id 'maven-publish' id 'signing' + id "org.gradle.test-retry" version "1.5.9" } apply plugin: 'opensearch.java' @@ -43,13 +44,13 @@ jacocoTestReport { } check.dependsOn jacocoTestReport -def slf4j_version_of_cronutils = "1.7.36" +def slf4j_version_of_cronutils = "2.0.13" dependencies { compileOnly "org.opensearch:opensearch:${opensearch_version}" // slf4j is the runtime dependency of cron-utils // if cron-utils version gets bumped, pls check the slf4j version cron-utils depending on // and bump if needed - implementation "com.cronutils:cron-utils:9.2.0" + implementation "com.cronutils:cron-utils:9.2.1" runtimeOnly "org.slf4j:slf4j-api:${slf4j_version_of_cronutils}" testImplementation "org.opensearch.test:framework:${opensearch_version}" @@ -66,10 +67,14 @@ configurations.all { shadowJar { relocate 'com.cronutils', 'org.opensearch.jobscheduler.repackage.com.cronutils' relocate 'org.slf4j', 'org.opensearch.jobscheduler.repackage.org.slf4j' // dependency of cron-utils - classifier = null + archiveClassifier = null } test { + retry { + failOnPassedAfterRetry = false + maxRetries = 5 + } doFirst { // reverse operation of https://github.com/elastic/elasticsearch/blob/7.6/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy#L736-L743 // to fix the classpath for unit tests diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/JobDocVersion.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/JobDocVersion.java index 9d6e8187..750d695c 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/JobDocVersion.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/JobDocVersion.java @@ -11,9 +11,9 @@ import java.io.IOException; import java.util.Locale; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.io.stream.Writeable; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; /** * Structure to represent scheduled job document version. 
JobScheduler use this to determine this job diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/JobExecutionContext.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/JobExecutionContext.java index cc9b3d35..c750019e 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/JobExecutionContext.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/JobExecutionContext.java @@ -8,9 +8,9 @@ */ package org.opensearch.jobscheduler.spi; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.io.stream.Writeable; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.jobscheduler.spi.utils.LockService; import java.io.IOException; diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/LockModel.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/LockModel.java index 808dc33a..8b0a134c 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/LockModel.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/LockModel.java @@ -10,11 +10,11 @@ import org.opensearch.ExceptionsHelper; import org.opensearch.OpenSearchException; -import org.opensearch.common.bytes.BytesReference; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.XContentParserUtils; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.index.seqno.SequenceNumbers; diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/CronSchedule.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/CronSchedule.java index e9c4d6c3..acf7eade 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/CronSchedule.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/CronSchedule.java @@ -13,12 +13,12 @@ import com.cronutils.model.time.ExecutionTime; import com.cronutils.parser.CronParser; import com.cronutils.utils.VisibleForTesting; -import org.opensearch.common.Strings; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentType; import java.io.IOException; import java.time.Clock; @@ -181,7 +181,7 @@ private XContentBuilder toXContentWithDelay(XContentBuilder builder) throws IOEx @Override public String toString() { - return Strings.toString(XContentType.JSON, this, false, true); + return Strings.toString(MediaTypeRegistry.JSON, this, false, true); } @Override diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/IntervalSchedule.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/IntervalSchedule.java index 17fc8079..e4e8ef4d 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/IntervalSchedule.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/IntervalSchedule.java @@ 
-9,13 +9,13 @@ package org.opensearch.jobscheduler.spi.schedule; import com.cronutils.utils.VisibleForTesting; -import org.opensearch.common.Strings; import org.opensearch.common.SuppressForbidden; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentType; import java.io.IOException; import java.time.Clock; @@ -189,7 +189,7 @@ void setClock(Clock clock) { @Override public String toString() { - return Strings.toString(XContentType.JSON, this, false, true); + return Strings.toString(MediaTypeRegistry.JSON, this, false, true); } @Override diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/Schedule.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/Schedule.java index 36bf8ab7..95f98246 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/Schedule.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/Schedule.java @@ -9,7 +9,7 @@ package org.opensearch.jobscheduler.spi.schedule; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.io.stream.Writeable; +import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.core.xcontent.ToXContentObject; import java.time.Duration; diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParser.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParser.java index 67da6c8d..7be74c45 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParser.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParser.java @@ -9,7 +9,7 @@ package org.opensearch.jobscheduler.spi.schedule; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.XContentParserUtils; import java.io.IOException; import java.time.Instant; diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/utils/LockService.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/utils/LockService.java index 89a73376..4e75f525 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/utils/LockService.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/utils/LockService.java @@ -16,7 +16,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.ResourceAlreadyExistsException; -import org.opensearch.action.ActionListener; +import org.opensearch.core.action.ActionListener; import org.opensearch.action.DocWriteResponse; import org.opensearch.action.admin.indices.create.CreateIndexRequest; import org.opensearch.action.delete.DeleteRequest; @@ -45,7 +45,7 @@ public final class LockService { private static final Logger logger = LogManager.getLogger(LockService.class); - private static final String LOCK_INDEX_NAME = ".opendistro-job-scheduler-lock"; + public static final String LOCK_INDEX_NAME = ".opendistro-job-scheduler-lock"; private final Client client; private final ClusterService clusterService; @@ -238,19 +238,15 @@ private void createLock(final LockModel tempLock, ActionListener list exception -> { if (exception instanceof 
VersionConflictEngineException) { logger.debug("Lock is already created. {}", exception.getMessage()); + listener.onResponse(null); + return; } - if (exception instanceof IOException) { - logger.error("IOException occurred creating lock", exception); - } - listener.onResponse(null); + listener.onFailure(exception); } ) ); } catch (IOException e) { logger.error("IOException occurred creating lock", e); - listener.onResponse(null); - } catch (Exception e) { - logger.error(e); listener.onFailure(e); } } diff --git a/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/CronScheduleTests.java b/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/CronScheduleTests.java index 2c76666c..f8323ca0 100644 --- a/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/CronScheduleTests.java +++ b/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/CronScheduleTests.java @@ -11,7 +11,7 @@ import com.cronutils.model.time.ExecutionTime; import org.opensearch.common.collect.Tuple; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; import org.opensearch.test.OpenSearchTestCase; diff --git a/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/IntervalScheduleTests.java b/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/IntervalScheduleTests.java index cc823e9d..c75431c0 100644 --- a/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/IntervalScheduleTests.java +++ b/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/IntervalScheduleTests.java @@ -10,7 +10,7 @@ import org.opensearch.common.collect.Tuple; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; import org.opensearch.test.OpenSearchTestCase; diff --git a/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParserTests.java b/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParserTests.java index 85ccc847..7fdee587 100644 --- a/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParserTests.java +++ b/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParserTests.java @@ -8,7 +8,7 @@ */ package org.opensearch.jobscheduler.spi.schedule; -import org.opensearch.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.test.OpenSearchTestCase; diff --git a/spi/src/test/java/org/opensearch/jobscheduler/spi/utils/LockServiceIT.java b/spi/src/test/java/org/opensearch/jobscheduler/spi/utils/LockServiceIT.java index f43d976a..c926c657 100644 --- a/spi/src/test/java/org/opensearch/jobscheduler/spi/utils/LockServiceIT.java +++ b/spi/src/test/java/org/opensearch/jobscheduler/spi/utils/LockServiceIT.java @@ -11,7 +11,7 @@ import org.junit.Before; import org.junit.Ignore; import org.mockito.Mockito; -import org.opensearch.action.ActionListener; +import org.opensearch.core.action.ActionListener; import org.opensearch.cluster.service.ClusterService; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.jobscheduler.spi.JobDocVersion; @@ -81,7 +81,8 
@@ public Long getLockDurationSeconds() { @Before public void setup() { - // the test cluster is an external cluster instead of internal cluster in new test framework, + // the test cluster is an external cluster instead of internal cluster in new + // test framework, // thus the OpenSearchIntegTestCase.clusterService() will throw exception. this.clusterService = Mockito.mock(ClusterService.class, Mockito.RETURNS_DEEP_STUBS); Mockito.when(this.clusterService.state().routingTable().hasIndex(".opendistro-job-scheduler-lock")) @@ -189,6 +190,28 @@ public void testSecondAcquireLockFail() throws Exception { latch.await(10L, TimeUnit.SECONDS); } + public void testAcquireLockWithLongIdFail() throws Exception { + String uniqSuffix = "_long_lock_id"; + String lockID = randomAlphaOfLengthBetween(513, 1000); + CountDownLatch latch = new CountDownLatch(1); + LockService lockService = new LockService(client(), this.clusterService); + final JobExecutionContext context = new JobExecutionContext( + Instant.now(), + new JobDocVersion(0, 0, 0), + lockService, + JOB_INDEX_NAME + uniqSuffix, + JOB_ID + uniqSuffix + ); + + lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap(lock -> { + fail("should throw an exception"); + }, exception -> { + assertTrue(exception.getMessage().contains("too long")); + latch.countDown(); + })); + latch.await(10L, TimeUnit.SECONDS); + } + public void testLockReleasedAndAcquired() throws Exception { String uniqSuffix = "_lock_release+acquire"; String lockID = randomAlphaOfLengthBetween(6, 15); @@ -432,7 +455,8 @@ public void testRenewLock() throws Exception { lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap(lock -> { assertNotNull("Expected to successfully grab lock", lock); - // Set the time of LockService (the 'lockTime' of acquired locks) to a fixed time. + // Set the time of LockService (the 'lockTime' of acquired locks) to a fixed + // time. 
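// Pinning the LockService clock to a fixed instant here (via setTime) lets the renewal
// result be checked against a known value rather than a moving Instant.now(); presumably
// renewLock stamps the renewed lock with the service's current time, so the comparison
// below stays deterministic.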
Instant now = Instant.now(); lockService.setTime(now); lockService.renewLock(lock, ActionListener.wrap(renewedLock -> { diff --git a/src/main/java/org/opensearch/jobscheduler/JobSchedulerPlugin.java b/src/main/java/org/opensearch/jobscheduler/JobSchedulerPlugin.java index 0e671b10..c305118e 100644 --- a/src/main/java/org/opensearch/jobscheduler/JobSchedulerPlugin.java +++ b/src/main/java/org/opensearch/jobscheduler/JobSchedulerPlugin.java @@ -31,16 +31,18 @@ import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; import org.opensearch.core.ParseField; -import org.opensearch.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.IndexModule; +import org.opensearch.indices.SystemIndexDescriptor; import org.opensearch.jobscheduler.utils.JobDetailsService; import org.opensearch.plugins.ActionPlugin; import org.opensearch.plugins.ExtensiblePlugin; import org.opensearch.plugins.Plugin; +import org.opensearch.plugins.SystemIndexPlugin; import org.opensearch.repositories.RepositoriesService; import org.opensearch.rest.RestController; import org.opensearch.script.ScriptService; @@ -61,13 +63,12 @@ import com.google.common.collect.ImmutableList; -public class JobSchedulerPlugin extends Plugin implements ActionPlugin, ExtensiblePlugin { +public class JobSchedulerPlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin { public static final String OPEN_DISTRO_JOB_SCHEDULER_THREAD_POOL_NAME = "open_distro_job_scheduler"; public static final String JS_BASE_URI = "/_plugins/_job_scheduler"; private static final Logger log = LogManager.getLogger(JobSchedulerPlugin.class); - private JobSweeper sweeper; private JobScheduler scheduler; private LockService lockService; @@ -81,6 +82,21 @@ public JobSchedulerPlugin() { this.indexToJobProviders = new HashMap<>(); } + public Set getIndicesToListen() { + return indicesToListen; + } + + public Map getIndexToJobProviders() { + return indexToJobProviders; + } + + @Override + public Collection getSystemIndexDescriptors(Settings settings) { + return Collections.singletonList( + new SystemIndexDescriptor(LockService.LOCK_INDEX_NAME, "Stores lock documents used for plugin job execution") + ); + } + @Override public Collection createComponents( Client client, diff --git a/src/main/java/org/opensearch/jobscheduler/model/ExtensionJobParameter.java b/src/main/java/org/opensearch/jobscheduler/model/ExtensionJobParameter.java index c3574e71..6351c663 100644 --- a/src/main/java/org/opensearch/jobscheduler/model/ExtensionJobParameter.java +++ b/src/main/java/org/opensearch/jobscheduler/model/ExtensionJobParameter.java @@ -12,9 +12,9 @@ import java.time.Instant; import org.opensearch.common.Nullable; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.io.stream.Writeable; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.jobscheduler.spi.ScheduledJobParameter; import org.opensearch.jobscheduler.spi.schedule.CronSchedule; diff 
--git a/src/main/java/org/opensearch/jobscheduler/model/JobDetails.java b/src/main/java/org/opensearch/jobscheduler/model/JobDetails.java index 889d4924..8b1ad320 100644 --- a/src/main/java/org/opensearch/jobscheduler/model/JobDetails.java +++ b/src/main/java/org/opensearch/jobscheduler/model/JobDetails.java @@ -17,7 +17,7 @@ import java.io.IOException; import java.util.Objects; -import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken; /** * This model class stores the job details of the extension. @@ -95,7 +95,6 @@ public static JobDetails parse(XContentParser parser) throws IOException { String extensionUniqueId = null; ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { String fieldName = parser.currentName(); parser.nextToken(); diff --git a/src/main/java/org/opensearch/jobscheduler/rest/action/RestGetJobDetailsAction.java b/src/main/java/org/opensearch/jobscheduler/rest/action/RestGetJobDetailsAction.java index 94b33b46..cb3e9563 100644 --- a/src/main/java/org/opensearch/jobscheduler/rest/action/RestGetJobDetailsAction.java +++ b/src/main/java/org/opensearch/jobscheduler/rest/action/RestGetJobDetailsAction.java @@ -10,7 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.action.ActionListener; +import org.opensearch.core.action.ActionListener; import org.opensearch.client.node.NodeClient; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; @@ -21,7 +21,7 @@ import org.opensearch.jobscheduler.utils.JobDetailsService; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestStatus; +import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.BytesRestResponse; import java.io.IOException; @@ -34,7 +34,7 @@ import java.util.concurrent.TimeoutException; import com.google.common.collect.ImmutableList; -import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken; import static org.opensearch.rest.RestRequest.Method.PUT; /** diff --git a/src/main/java/org/opensearch/jobscheduler/rest/action/RestGetLockAction.java b/src/main/java/org/opensearch/jobscheduler/rest/action/RestGetLockAction.java index 4628ace6..81e44c9a 100644 --- a/src/main/java/org/opensearch/jobscheduler/rest/action/RestGetLockAction.java +++ b/src/main/java/org/opensearch/jobscheduler/rest/action/RestGetLockAction.java @@ -10,7 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.action.ActionListener; +import org.opensearch.core.action.ActionListener; import org.opensearch.client.node.NodeClient; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; @@ -24,7 +24,7 @@ import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestRequest; -import org.opensearch.rest.RestStatus; +import org.opensearch.core.rest.RestStatus; import java.io.IOException; import java.util.List; @@ -35,7 +35,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import com.google.common.collect.ImmutableList; -import static 
org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken; import static org.opensearch.rest.RestRequest.Method.GET; import static org.opensearch.jobscheduler.spi.LockModel.GET_LOCK_ACTION; @@ -76,15 +76,12 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient // Process acquire lock request CompletableFuture inProgressFuture = new CompletableFuture<>(); - lockService.acquireLockWithId( - jobIndexName, - lockDurationSeconds, - jobId, - ActionListener.wrap(lockModel -> { inProgressFuture.complete(lockModel); }, exception -> { - logger.error("Could not acquire lock with ID : " + jobId, exception); - inProgressFuture.completeExceptionally(exception); - }) - ); + lockService.acquireLockWithId(jobIndexName, lockDurationSeconds, jobId, ActionListener.wrap(lockModel -> { + inProgressFuture.complete(lockModel); + }, exception -> { + logger.error("Could not acquire lock with ID : " + jobId, exception); + inProgressFuture.completeExceptionally(exception); + })); try { inProgressFuture.orTimeout(JobDetailsService.TIME_OUT_FOR_REQUEST, TimeUnit.SECONDS); diff --git a/src/main/java/org/opensearch/jobscheduler/rest/action/RestReleaseLockAction.java b/src/main/java/org/opensearch/jobscheduler/rest/action/RestReleaseLockAction.java index 62e3668d..faf4836e 100644 --- a/src/main/java/org/opensearch/jobscheduler/rest/action/RestReleaseLockAction.java +++ b/src/main/java/org/opensearch/jobscheduler/rest/action/RestReleaseLockAction.java @@ -19,7 +19,7 @@ import java.util.concurrent.ExecutionException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.action.ActionListener; +import org.opensearch.core.action.ActionListener; import org.opensearch.client.node.NodeClient; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.jobscheduler.JobSchedulerPlugin; @@ -30,7 +30,7 @@ import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestRequest; import static org.opensearch.rest.RestRequest.Method.PUT; -import org.opensearch.rest.RestStatus; +import org.opensearch.core.rest.RestStatus; public class RestReleaseLockAction extends BaseRestHandler { diff --git a/src/main/java/org/opensearch/jobscheduler/rest/request/GetJobDetailsRequest.java b/src/main/java/org/opensearch/jobscheduler/rest/request/GetJobDetailsRequest.java index 8fc8a46d..7e3f8aed 100644 --- a/src/main/java/org/opensearch/jobscheduler/rest/request/GetJobDetailsRequest.java +++ b/src/main/java/org/opensearch/jobscheduler/rest/request/GetJobDetailsRequest.java @@ -11,10 +11,10 @@ import java.util.Objects; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.XContentParserUtils; import java.io.IOException; diff --git a/src/main/java/org/opensearch/jobscheduler/scheduler/JobScheduler.java b/src/main/java/org/opensearch/jobscheduler/scheduler/JobScheduler.java index afbcf4ce..21219ee6 100644 --- a/src/main/java/org/opensearch/jobscheduler/scheduler/JobScheduler.java +++ 
b/src/main/java/org/opensearch/jobscheduler/scheduler/JobScheduler.java @@ -119,13 +119,10 @@ public boolean deschedule(String indexName, String id) { jobInfo.setExpectedPreviousExecutionTime(null); Scheduler.ScheduledCancellable scheduledCancellable = jobInfo.getScheduledCancellable(); - if (scheduledCancellable != null) { - if (scheduledCancellable.cancel()) { - this.scheduledJobInfo.removeJob(indexName, id); - } else { - return false; - } + if (scheduledCancellable != null && !scheduledCancellable.cancel()) { + return false; } + this.scheduledJobInfo.removeJob(indexName, id); return true; } @@ -148,7 +145,18 @@ boolean reschedule( log.info("No next execution time for job {}", jobParameter.getName()); return true; } - Duration duration = Duration.between(this.clock.instant(), nextExecutionTime); + Instant now = this.clock.instant(); + Duration duration = Duration.between(now, nextExecutionTime); + if (duration.isNegative()) { + log.info( + "job {} expected time: {} < current time: {}, setting next execute time to current", + jobParameter.getName(), + nextExecutionTime.toEpochMilli(), + now.toEpochMilli() + ); + nextExecutionTime = now; + duration = Duration.ZERO; + } // Too many jobs start at the same time point will bring burst. Add random jitter delay to spread out load. // Example, if interval is 10 minutes, jitter is 0.6, next job run will be randomly delayed by 0 to 10*0.6 minutes. diff --git a/src/main/java/org/opensearch/jobscheduler/sweeper/JobSweeper.java b/src/main/java/org/opensearch/jobscheduler/sweeper/JobSweeper.java index 593aee54..e2bb077c 100644 --- a/src/main/java/org/opensearch/jobscheduler/sweeper/JobSweeper.java +++ b/src/main/java/org/opensearch/jobscheduler/sweeper/JobSweeper.java @@ -8,6 +8,7 @@ */ package org.opensearch.jobscheduler.sweeper; +import org.opensearch.common.lifecycle.LifecycleListener; import org.opensearch.jobscheduler.JobSchedulerSettings; import org.opensearch.jobscheduler.ScheduledJobProvider; import org.opensearch.jobscheduler.scheduler.JobScheduler; @@ -21,7 +22,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.OpenSearchException; -import org.opensearch.action.ActionListener; +import org.opensearch.core.action.ActionListener; import org.opensearch.action.bulk.BackoffPolicy; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; @@ -33,8 +34,7 @@ import org.opensearch.cluster.routing.Murmur3HashFunction; import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.component.LifecycleListener; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.OpenSearchExecutors; @@ -47,8 +47,8 @@ import org.opensearch.index.engine.Engine; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.shard.IndexingOperationListener; -import org.opensearch.index.shard.ShardId; -import org.opensearch.rest.RestStatus; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.core.rest.RestStatus; import org.opensearch.search.SearchHit; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.sort.FieldSortBuilder; @@ -389,7 +389,7 @@ private void sweepShard(ShardId shardId, ShardNodes shardNodes, String startAfte String searchAfter 
= startAfter == null ? "" : startAfter; while (searchAfter != null) { SearchRequest jobSearchRequest = new SearchRequest().indices(shardId.getIndexName()) - .preference("_shards:" + shardId.id() + "|_only_local") + .preference("_shards:" + shardId.id() + "|_primary") .source( new SearchSourceBuilder().version(true) .seqNoAndPrimaryTerm(true) diff --git a/src/main/java/org/opensearch/jobscheduler/transport/AcquireLockRequest.java b/src/main/java/org/opensearch/jobscheduler/transport/AcquireLockRequest.java index 5efe5aa7..c7c53414 100644 --- a/src/main/java/org/opensearch/jobscheduler/transport/AcquireLockRequest.java +++ b/src/main/java/org/opensearch/jobscheduler/transport/AcquireLockRequest.java @@ -12,12 +12,12 @@ import java.util.Objects; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.XContentParserUtils; /** * Request from extensions to acquire a lock for scheduled job execution diff --git a/src/main/java/org/opensearch/jobscheduler/transport/AcquireLockResponse.java b/src/main/java/org/opensearch/jobscheduler/transport/AcquireLockResponse.java index c0f92a84..a9dcf9d5 100644 --- a/src/main/java/org/opensearch/jobscheduler/transport/AcquireLockResponse.java +++ b/src/main/java/org/opensearch/jobscheduler/transport/AcquireLockResponse.java @@ -10,7 +10,7 @@ import java.io.IOException; -import org.opensearch.common.xcontent.XContentParserUtils; +import org.opensearch.core.xcontent.XContentParserUtils; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; diff --git a/src/main/java/org/opensearch/jobscheduler/transport/request/ExtensionJobActionRequest.java b/src/main/java/org/opensearch/jobscheduler/transport/request/ExtensionJobActionRequest.java index 711c2341..11091b02 100644 --- a/src/main/java/org/opensearch/jobscheduler/transport/request/ExtensionJobActionRequest.java +++ b/src/main/java/org/opensearch/jobscheduler/transport/request/ExtensionJobActionRequest.java @@ -9,9 +9,14 @@ package org.opensearch.jobscheduler.transport.request; import java.io.IOException; -import org.opensearch.common.io.stream.Writeable; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; + +import com.google.protobuf.ByteString; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.extensions.action.ExtensionActionRequest; -import org.opensearch.jobscheduler.utils.JobDetailsService; /** * Request to extensions to invoke a job action, converts request params to a byte array @@ -19,6 +24,8 @@ */ public class ExtensionJobActionRequest extends ExtensionActionRequest { + public static final byte UNIT_SEPARATOR = (byte) '\u001F'; + /** * Instantiates a new ExtensionJobActionRequest * @@ -27,7 +34,36 @@ public class ExtensionJobActionRequest extends ExtensionAct * @throws IOException if serialization fails */ public 
ExtensionJobActionRequest(String extensionActionName, T actionParams) throws IOException { - super(extensionActionName, JobDetailsService.convertParamsToBytes(actionParams)); + super(extensionActionName, convertParamsToByteString(actionParams)); + } + + /** + * Converts an object of type T that extends {@link Writeable} into a byte array and prepends the fully qualified request class name bytes + * + * @param a class that extends writeable + * @param actionParams the action parameters to be serialized + * @throws IOException if serialization fails + * @return the byte array of the parameters + */ + private static ByteString convertParamsToByteString(T actionParams) throws IOException { + + // Write inner request to output stream and convert to byte array + BytesStreamOutput out = new BytesStreamOutput(); + actionParams.writeTo(out); + out.flush(); + byte[] requestBytes = BytesReference.toBytes(out.bytes()); + + // Convert fully qualifed class name to byte array + byte[] requestClassBytes = actionParams.getClass().getName().getBytes(StandardCharsets.UTF_8); + + // Generate ExtensionActionRequest responseByte array + byte[] proxyRequestBytes = ByteBuffer.allocate(requestClassBytes.length + 1 + requestBytes.length) + .put(requestClassBytes) + .put(ExtensionJobActionRequest.UNIT_SEPARATOR) + .put(requestBytes) + .array(); + + return ByteString.copyFrom(proxyRequestBytes); } } diff --git a/src/main/java/org/opensearch/jobscheduler/transport/request/JobParameterRequest.java b/src/main/java/org/opensearch/jobscheduler/transport/request/JobParameterRequest.java index 245423a5..162a5348 100644 --- a/src/main/java/org/opensearch/jobscheduler/transport/request/JobParameterRequest.java +++ b/src/main/java/org/opensearch/jobscheduler/transport/request/JobParameterRequest.java @@ -9,10 +9,12 @@ package org.opensearch.jobscheduler.transport.request; import java.io.IOException; -import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.io.stream.Writeable; + +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; @@ -21,7 +23,7 @@ /** * Request to extensions to parse a ScheduledJobParameter */ -public class JobParameterRequest implements Writeable { +public class JobParameterRequest extends ActionRequest { /** * accessToken is the placeholder for the user Identity/access token to be used to perform validation prior to invoking the extension action @@ -94,6 +96,11 @@ public void writeTo(StreamOutput out) throws IOException { this.jobDocVersion.writeTo(out); } + @Override + public ActionRequestValidationException validate() { + return null; + } + public String getAccessToken() { return this.accessToken; } diff --git a/src/main/java/org/opensearch/jobscheduler/transport/request/JobRunnerRequest.java b/src/main/java/org/opensearch/jobscheduler/transport/request/JobRunnerRequest.java index 29eef927..195e111d 100644 --- a/src/main/java/org/opensearch/jobscheduler/transport/request/JobRunnerRequest.java +++ b/src/main/java/org/opensearch/jobscheduler/transport/request/JobRunnerRequest.java @@ -10,16 
+10,17 @@ import java.io.IOException; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.io.stream.Writeable; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.jobscheduler.spi.JobExecutionContext; /** * Request to extensions to invoke their ScheduledJobRunner implementation * */ -public class JobRunnerRequest implements Writeable { +public class JobRunnerRequest extends ActionRequest { /** * accessToken is the placeholder for the user Identity/access token to be used to perform validation prior to invoking the extension action @@ -78,6 +79,11 @@ public void writeTo(StreamOutput out) throws IOException { this.jobExecutionContext.writeTo(out); } + @Override + public ActionRequestValidationException validate() { + return null; + } + public String getAccessToken() { return this.accessToken; } @@ -89,4 +95,5 @@ public String getJobParameterDocumentId() { public JobExecutionContext getJobExecutionContext() { return this.jobExecutionContext; } + } diff --git a/src/main/java/org/opensearch/jobscheduler/transport/response/ExtensionJobActionResponse.java b/src/main/java/org/opensearch/jobscheduler/transport/response/ExtensionJobActionResponse.java deleted file mode 100644 index 50f37b9e..00000000 --- a/src/main/java/org/opensearch/jobscheduler/transport/response/ExtensionJobActionResponse.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ -package org.opensearch.jobscheduler.transport.response; - -import java.io.IOException; -import org.opensearch.common.io.stream.Writeable; -import org.opensearch.extensions.action.ExtensionActionResponse; -import org.opensearch.jobscheduler.utils.JobDetailsService; - -/** - * Response from extension job action, converts response params to a byte array - * - */ -public class ExtensionJobActionResponse extends ExtensionActionResponse { - - /** - * Instantiates a new ExtensionJobActionResponse - * - * @param actionResponse the response object holding the action response parameters - * @throws IOException if serialization fails - */ - public ExtensionJobActionResponse(T actionResponse) throws IOException { - super(JobDetailsService.convertParamsToBytes(actionResponse)); - } - -} diff --git a/src/main/java/org/opensearch/jobscheduler/transport/response/JobParameterResponse.java b/src/main/java/org/opensearch/jobscheduler/transport/response/JobParameterResponse.java index 18290c13..1f703b10 100644 --- a/src/main/java/org/opensearch/jobscheduler/transport/response/JobParameterResponse.java +++ b/src/main/java/org/opensearch/jobscheduler/transport/response/JobParameterResponse.java @@ -10,15 +10,15 @@ import java.io.IOException; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.io.stream.Writeable; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.jobscheduler.model.ExtensionJobParameter; /** * Response from extensions to parse a ScheduledJobParameter */ -public class JobParameterResponse implements Writeable { +public class JobParameterResponse extends ActionResponse { /** * jobParameter is job index entry intended to be used to validate prior to job execution diff --git a/src/main/java/org/opensearch/jobscheduler/transport/response/JobRunnerResponse.java b/src/main/java/org/opensearch/jobscheduler/transport/response/JobRunnerResponse.java index a5a267f3..93cefd07 100644 --- a/src/main/java/org/opensearch/jobscheduler/transport/response/JobRunnerResponse.java +++ b/src/main/java/org/opensearch/jobscheduler/transport/response/JobRunnerResponse.java @@ -10,15 +10,15 @@ import java.io.IOException; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.io.stream.Writeable; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; /** * Response from extensions indicating the status of the ScheduledJobRunner invocation * */ -public class JobRunnerResponse implements Writeable { +public class JobRunnerResponse extends ActionResponse { /** * jobRunnerStatus indicates if the extension job runner has been executed diff --git a/src/main/java/org/opensearch/jobscheduler/utils/JobDetailsService.java b/src/main/java/org/opensearch/jobscheduler/utils/JobDetailsService.java index 2e2eaab3..3af7cb3d 100644 --- a/src/main/java/org/opensearch/jobscheduler/utils/JobDetailsService.java +++ b/src/main/java/org/opensearch/jobscheduler/utils/JobDetailsService.java @@ -16,7 +16,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.ResourceAlreadyExistsException; -import org.opensearch.action.ActionListener; +import 
org.opensearch.core.action.ActionListener; import org.opensearch.action.DocWriteResponse; import org.opensearch.action.admin.indices.create.CreateIndexRequest; import org.opensearch.action.delete.DeleteRequest; @@ -31,9 +31,6 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.extensions.action.ExtensionProxyAction; import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.io.stream.Writeable; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.engine.DocumentMissingException; @@ -41,7 +38,7 @@ import org.opensearch.index.engine.VersionConflictEngineException; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.shard.IndexingOperationListener; -import org.opensearch.index.shard.ShardId; +import org.opensearch.core.index.shard.ShardId; import org.opensearch.jobscheduler.ScheduledJobProvider; import org.opensearch.jobscheduler.model.ExtensionJobParameter; import org.opensearch.jobscheduler.model.JobDetails; @@ -72,7 +69,7 @@ public class JobDetailsService implements IndexingOperationListener { public static final String JOB_DETAILS_INDEX_NAME = ".opensearch-job-scheduler-job-details"; private static final String PLUGINS_JOB_DETAILS_MAPPING_FILE = "/mappings/opensearch_job_scheduler_job_details.json"; - public static Long TIME_OUT_FOR_REQUEST = 10L; + public static Long TIME_OUT_FOR_REQUEST = 15L; private final Client client; private final ClusterService clusterService; private Set indicesToListen; @@ -503,24 +500,6 @@ private void updateJobDetails(final String documentId, final JobDetails updateJo } } - /** - * Takes in an object of type T that extends {@link Writeable} and converts the writeable fields to a byte array - * - * @param a class that extends writeable - * @param actionParams the action parameters to be serialized - * @throws IOException if serialization fails - * @return the byte array of the parameters - */ - public static byte[] convertParamsToBytes(T actionParams) throws IOException { - // Write all to output stream - BytesStreamOutput out = new BytesStreamOutput(); - actionParams.writeTo(out); - out.flush(); - - // convert bytes stream to byte array - return BytesReference.toBytes(out.bytes()); - } - private String jobDetailsMapping() { try { InputStream in = JobDetailsService.class.getResourceAsStream(PLUGINS_JOB_DETAILS_MAPPING_FILE); diff --git a/src/main/resources/mappings/opensearch_job_scheduler_job_details.json b/src/main/resources/mappings/opensearch_job_scheduler_job_details.json index e975bec0..0cef3f4a 100644 --- a/src/main/resources/mappings/opensearch_job_scheduler_job_details.json +++ b/src/main/resources/mappings/opensearch_job_scheduler_job_details.json @@ -1,10 +1,10 @@ { "dynamic": "false", "properties": { - "job_index_name": { + "job_index": { "type": "keyword" }, - "job_type_name": { + "job_type": { "type": "keyword" }, "job_parser_action": { diff --git a/src/test/java/org/opensearch/jobscheduler/JobSchedulerPluginTests.java b/src/test/java/org/opensearch/jobscheduler/JobSchedulerPluginTests.java new file mode 100644 index 00000000..ea03e769 --- /dev/null +++ b/src/test/java/org/opensearch/jobscheduler/JobSchedulerPluginTests.java @@ -0,0 +1,190 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require 
contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ +package org.opensearch.jobscheduler; + +import org.opensearch.Version; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.node.DiscoveryNodes; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.IndexScopedSettings; +import org.opensearch.common.settings.SettingsFilter; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.apache.lucene.tests.index.AssertingDirectoryReader; +import org.junit.Before; +import org.mockito.Mock; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.index.Index; +import org.opensearch.env.Environment; +import org.opensearch.index.IndexModule; +import org.opensearch.index.IndexSettings; +import org.opensearch.index.analysis.AnalysisRegistry; +import org.opensearch.index.engine.EngineConfigFactory; +import org.opensearch.jobscheduler.rest.action.RestGetJobDetailsAction; +import org.opensearch.jobscheduler.rest.action.RestGetLockAction; +import org.opensearch.jobscheduler.rest.action.RestReleaseLockAction; +import org.opensearch.jobscheduler.spi.JobSchedulerExtension; +import org.opensearch.jobscheduler.spi.ScheduledJobParser; +import org.opensearch.jobscheduler.spi.ScheduledJobRunner; +import org.opensearch.jobscheduler.utils.JobDetailsService; +import org.opensearch.plugins.ExtensiblePlugin; +import org.opensearch.rest.RestController; +import org.opensearch.rest.RestHandler; +import org.opensearch.test.IndexSettingsModule; +import org.opensearch.test.OpenSearchTestCase; + +import org.opensearch.test.engine.MockEngineFactory; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class JobSchedulerPluginTests extends OpenSearchTestCase { + + private JobSchedulerPlugin plugin; + + private Index index; + private Settings settings; + private Settings sweeperSettings; + private IndexSettings indexSettings; + private IndexModule indexModule; + private AnalysisRegistry emptyAnalysisRegistry; + + @Mock + private RestController restController; + @Mock + private ClusterSettings clusterSettings; + @Mock + private IndexScopedSettings indexScopedSettings; + @Mock + private SettingsFilter settingsFilter; + @Mock + private IndexNameExpressionResolver indexNameExpressionResolver; + @Mock + private Supplier nodesInCluster; + + @Before + public void setup() { + plugin = new JobSchedulerPlugin(); + settings = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + indexSettings = IndexSettingsModule.newIndexSettings(JobDetailsService.JOB_DETAILS_INDEX_NAME, settings); + index = indexSettings.getIndex(); + final MockEngineFactory engineFactory = new MockEngineFactory(AssertingDirectoryReader.class); + indexModule = new IndexModule( + indexSettings, + emptyAnalysisRegistry, + engineFactory, + new EngineConfigFactory(indexSettings), + 
Collections.emptyMap(), + () -> true, + new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY)), + Collections.emptyMap() + ); + sweeperSettings = Settings.builder() + .put(JobSchedulerSettings.SWEEP_PERIOD.getKey(), TimeValue.timeValueMinutes(1)) + .put(JobSchedulerSettings.SWEEP_PAGE_SIZE.getKey(), 10) + .put(JobSchedulerSettings.SWEEP_BACKOFF_MILLIS.getKey(), TimeValue.timeValueMillis(100)) + .put(JobSchedulerSettings.SWEEP_BACKOFF_RETRY_COUNT.getKey(), 5) + .build(); + } + + public void testLoadExtensions() { + ExtensiblePlugin.ExtensionLoader mockLoader = mock(ExtensiblePlugin.ExtensionLoader.class); + JobSchedulerExtension mockExtension1 = mock(JobSchedulerExtension.class); + JobSchedulerExtension mockExtension2 = mock(JobSchedulerExtension.class); + when(mockLoader.loadExtensions(JobSchedulerExtension.class)).thenReturn(Arrays.asList(mockExtension1, mockExtension2)); + when(mockExtension1.getJobType()).thenReturn("jobType1"); + when(mockExtension1.getJobIndex()).thenReturn("index1"); + when(mockExtension2.getJobType()).thenReturn("jobType2"); + when(mockExtension2.getJobIndex()).thenReturn("index2"); + ScheduledJobParser mockParser = mock(ScheduledJobParser.class); + ScheduledJobRunner mockRunner = mock(ScheduledJobRunner.class); + when(mockExtension1.getJobParser()).thenReturn(mockParser); + when(mockExtension1.getJobRunner()).thenReturn(mockRunner); + plugin.loadExtensions(mockLoader); + assertEquals(2, plugin.getIndexToJobProviders().size()); + assertTrue(plugin.getIndicesToListen().contains("index1")); + assertTrue(plugin.getIndicesToListen().contains("index2")); + } + + public void testGetSettings_returnsSettingsList() { + List> settings = plugin.getSettings(); + assertNotNull(settings); + assertEquals(12, settings.size()); + assertTrue(settings.contains(LegacyOpenDistroJobSchedulerSettings.SWEEP_PAGE_SIZE)); + assertTrue(settings.contains(LegacyOpenDistroJobSchedulerSettings.REQUEST_TIMEOUT)); + assertTrue(settings.contains(LegacyOpenDistroJobSchedulerSettings.SWEEP_BACKOFF_MILLIS)); + assertTrue(settings.contains(LegacyOpenDistroJobSchedulerSettings.SWEEP_BACKOFF_RETRY_COUNT)); + assertTrue(settings.contains(LegacyOpenDistroJobSchedulerSettings.SWEEP_PERIOD)); + assertTrue(settings.contains(LegacyOpenDistroJobSchedulerSettings.JITTER_LIMIT)); + assertTrue(settings.contains(JobSchedulerSettings.SWEEP_PAGE_SIZE)); + assertTrue(settings.contains(JobSchedulerSettings.REQUEST_TIMEOUT)); + assertTrue(settings.contains(JobSchedulerSettings.SWEEP_BACKOFF_MILLIS)); + assertTrue(settings.contains(JobSchedulerSettings.SWEEP_BACKOFF_RETRY_COUNT)); + assertTrue(settings.contains(JobSchedulerSettings.SWEEP_PERIOD)); + assertTrue(settings.contains(JobSchedulerSettings.JITTER_LIMIT)); + } + + public void testOnIndexModule() { + assertEquals(indexModule.getIndex().toString(), index.toString()); + assertEquals(index.getName(), JobDetailsService.JOB_DETAILS_INDEX_NAME); + } + + public void testGetRestHandlers() { + List restHandlers = plugin.getRestHandlers( + settings, + restController, + clusterSettings, + indexScopedSettings, + settingsFilter, + indexNameExpressionResolver, + nodesInCluster + ); + assertThat( + restHandlers, + containsInAnyOrder( + instanceOf(RestGetJobDetailsAction.class), + instanceOf(RestGetLockAction.class), + instanceOf(RestReleaseLockAction.class) + ) + ); + } + + public void testGetIndicesToListen() { + Set expectedIndices = new HashSet<>(); + expectedIndices.add("index1"); + expectedIndices.add("index2"); + 
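// Note: getIndicesToListen() appears to expose the plugin's live, mutable set rather than a copy;
// the entries added through the returned reference are visible on the next call, which is what
// the equality assertion below relies on.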
plugin.getIndicesToListen().addAll(expectedIndices); + Set actualIndices = plugin.getIndicesToListen(); + assertEquals(expectedIndices, actualIndices); + } + + public void testGetIndexToJobProviders() { + Map expectedProviders = plugin.getIndexToJobProviders(); + ScheduledJobParser mockParser = mock(ScheduledJobParser.class); + ScheduledJobRunner mockRunner = mock(ScheduledJobRunner.class); + expectedProviders.put("index1", new ScheduledJobProvider("test-job-1", "test-job-index-1", mockParser, mockRunner)); + Map actualProviders = plugin.getIndexToJobProviders(); + assertEquals(expectedProviders, actualProviders); + } +} diff --git a/src/test/java/org/opensearch/jobscheduler/ODFERestTestCase.java b/src/test/java/org/opensearch/jobscheduler/ODFERestTestCase.java index 4ead94ae..13d6f6ac 100644 --- a/src/test/java/org/opensearch/jobscheduler/ODFERestTestCase.java +++ b/src/test/java/org/opensearch/jobscheduler/ODFERestTestCase.java @@ -39,9 +39,9 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.MediaType; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.test.rest.OpenSearchRestTestCase; public abstract class ODFERestTestCase extends OpenSearchRestTestCase { @@ -91,9 +91,9 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE @After protected void wipeAllODFEIndices() throws IOException { Response response = adminClient().performRequest(new Request("GET", "/_cat/indices?format=json&expand_wildcards=all")); - XContentType xContentType = XContentType.fromMediaType(response.getEntity().getContentType()); + MediaType mediaType = MediaType.fromMediaType(response.getEntity().getContentType()); try ( - XContentParser parser = xContentType.xContent() + XContentParser parser = mediaType.xContent() .createParser( NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, diff --git a/src/test/java/org/opensearch/jobscheduler/ScheduledJobProviderTests.java b/src/test/java/org/opensearch/jobscheduler/ScheduledJobProviderTests.java new file mode 100644 index 00000000..e1c7769c --- /dev/null +++ b/src/test/java/org/opensearch/jobscheduler/ScheduledJobProviderTests.java @@ -0,0 +1,63 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ +package org.opensearch.jobscheduler; + +import org.junit.Before; +import org.mockito.Mock; +import org.opensearch.jobscheduler.spi.ScheduledJobParser; +import org.opensearch.jobscheduler.spi.ScheduledJobRunner; +import org.opensearch.test.OpenSearchTestCase; + +import static org.mockito.Mockito.mock; + +public class ScheduledJobProviderTests extends OpenSearchTestCase { + + private static final String JOB_TYPE = "test_job_type"; + private static final String JOB_INDEX_NAME = "test_job_index"; + + @Mock + private ScheduledJobParser jobParser; + + @Mock + private ScheduledJobRunner jobRunner; + + private ScheduledJobProvider scheduledJobProvider; + + @Before + public void setUp() throws Exception { + super.setUp(); + scheduledJobProvider = new ScheduledJobProvider(JOB_TYPE, JOB_INDEX_NAME, jobParser, jobRunner); + } + + public void testGetJobType() { + assertEquals(JOB_TYPE, scheduledJobProvider.getJobType()); + } + + public void testGetJobIndexName() { + assertEquals(JOB_INDEX_NAME, scheduledJobProvider.getJobIndexName()); + } + + public void testGetJobParser() { + assertEquals(jobParser, scheduledJobProvider.getJobParser()); + } + + public void testGetJobRunner() { + assertEquals(jobRunner, scheduledJobProvider.getJobRunner()); + } + + public void testConstructor() { + ScheduledJobParser parser = mock(ScheduledJobParser.class); + ScheduledJobRunner runner = mock(ScheduledJobRunner.class); + ScheduledJobProvider provider = new ScheduledJobProvider(JOB_TYPE, JOB_INDEX_NAME, parser, runner); + assertEquals(JOB_TYPE, provider.getJobType()); + assertEquals(JOB_INDEX_NAME, provider.getJobIndexName()); + assertEquals(parser, provider.getJobParser()); + assertEquals(runner, provider.getJobRunner()); + } +} diff --git a/src/test/java/org/opensearch/jobscheduler/TestHelpers.java b/src/test/java/org/opensearch/jobscheduler/TestHelpers.java index 8615cf80..2d6b0eee 100644 --- a/src/test/java/org/opensearch/jobscheduler/TestHelpers.java +++ b/src/test/java/org/opensearch/jobscheduler/TestHelpers.java @@ -20,7 +20,7 @@ import org.opensearch.client.Request; import org.opensearch.client.RequestOptions; import org.opensearch.client.WarningsHandler; -import org.opensearch.common.bytes.BytesReference; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/src/test/java/org/opensearch/jobscheduler/model/ExtensionJobParameterTests.java b/src/test/java/org/opensearch/jobscheduler/model/ExtensionJobParameterTests.java new file mode 100644 index 00000000..c4a3a533 --- /dev/null +++ b/src/test/java/org/opensearch/jobscheduler/model/ExtensionJobParameterTests.java @@ -0,0 +1,185 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ +package org.opensearch.jobscheduler.model; + +import org.junit.Before; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.jobscheduler.spi.ScheduledJobParameter; +import org.opensearch.jobscheduler.spi.schedule.Schedule; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.time.Instant; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ExtensionJobParameterTests extends OpenSearchTestCase { + + private String jobName = "testJob"; + Schedule schedule; + private Instant lastUpdateTime = Instant.ofEpochSecond(1609459200); + private Instant enabledTime = Instant.ofEpochSecond(1609459200);; + private boolean isEnabled = true; + private Long lockDurationSeconds = 60L; + private Double jitter = 0.1; + private ExtensionJobParameter extensionJobParameter; + + public ExtensionJobParameterTests() throws IOException {} + + @Before + public void setUp() throws Exception { + super.setUp(); + extensionJobParameter = new ExtensionJobParameter( + jobName, + schedule, + lastUpdateTime, + enabledTime, + isEnabled, + lockDurationSeconds, + jitter + ); + } + + public void testConstructorWithCronSchedule() throws IOException { + String jobName = "testJob"; + Instant lastUpdateTime = Instant.ofEpochSecond(1609459200); + Instant enabledTime = Instant.ofEpochSecond(1609459200); + boolean isEnabled = true; + Long lockDurationSeconds = 60L; + Double jitter = 0.1; + assertEquals(jobName, extensionJobParameter.getName()); + assertEquals(schedule, extensionJobParameter.getSchedule()); + assertEquals(lastUpdateTime, extensionJobParameter.getLastUpdateTime()); + assertEquals(enabledTime, extensionJobParameter.getEnabledTime()); + assertTrue(extensionJobParameter.isEnabled()); + assertEquals(lockDurationSeconds, extensionJobParameter.getLockDurationSeconds()); + assertEquals(jitter, extensionJobParameter.getJitter()); + assertEquals(isEnabled, extensionJobParameter.isEnabled()); + } + + public void testExtensionJobParameterFromScheduledJobParameter() { + String jobName = "test-job"; + Instant lastUpdateTime = Instant.now(); + Instant enabledTime = Instant.now().plusSeconds(3600); + boolean isEnabled = true; + Long lockDurationSeconds = 60L; + Double jitter = 0.5; + ScheduledJobParameter mockJobParameter = mock(ScheduledJobParameter.class); + when(mockJobParameter.getName()).thenReturn(jobName); + when(mockJobParameter.getLastUpdateTime()).thenReturn(lastUpdateTime); + when(mockJobParameter.getEnabledTime()).thenReturn(enabledTime); + when(mockJobParameter.isEnabled()).thenReturn(isEnabled); + when(mockJobParameter.getLockDurationSeconds()).thenReturn(lockDurationSeconds); + when(mockJobParameter.getJitter()).thenReturn(jitter); + ExtensionJobParameter extensionJobParameter = new ExtensionJobParameter(mockJobParameter); + assertEquals(jobName, extensionJobParameter.getName()); + assertEquals(lastUpdateTime, extensionJobParameter.getLastUpdateTime()); + assertEquals(enabledTime, extensionJobParameter.getEnabledTime()); + assertEquals(isEnabled, extensionJobParameter.isEnabled()); + assertEquals(lockDurationSeconds, extensionJobParameter.getLockDurationSeconds()); + assertEquals(jitter, extensionJobParameter.getJitter(), 0.0); + } + + public void testExtensionJobParameterFromScheduledJobParameterWithNullJitter() { + String jobName = "test-job"; + Instant lastUpdateTime = 
Instant.now(); + Instant enabledTime = Instant.now().plusSeconds(3600); + boolean isEnabled = true; + Long lockDurationSeconds = 60L; + ScheduledJobParameter mockJobParameter = mock(ScheduledJobParameter.class); + when(mockJobParameter.getName()).thenReturn(jobName); + when(mockJobParameter.getLastUpdateTime()).thenReturn(lastUpdateTime); + when(mockJobParameter.getEnabledTime()).thenReturn(enabledTime); + when(mockJobParameter.isEnabled()).thenReturn(isEnabled); + when(mockJobParameter.getLockDurationSeconds()).thenReturn(lockDurationSeconds); + when(mockJobParameter.getJitter()).thenReturn(null); + ExtensionJobParameter extensionJobParameter = new ExtensionJobParameter(mockJobParameter); + assertEquals(0.0, extensionJobParameter.getJitter(), 0.0); + } + + public void testGetName() { + assertEquals(jobName, extensionJobParameter.getName()); + } + + public void testGetLastUpdateTime() { + assertEquals(lastUpdateTime, extensionJobParameter.getLastUpdateTime()); + } + + public void testGetEnabledTime() { + assertEquals(enabledTime, extensionJobParameter.getEnabledTime()); + } + + public void testGetSchedule() { + assertEquals(schedule, extensionJobParameter.getSchedule()); + } + + public void testIsEnabled() { + assertEquals(isEnabled, extensionJobParameter.isEnabled()); + } + + public void testGetLockDurationSeconds() { + assertEquals(lockDurationSeconds, extensionJobParameter.getLockDurationSeconds()); + } + + public void testGetJitter() { + assertEquals(jitter, extensionJobParameter.getJitter()); + } + + public void testToXContent() throws IOException { + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder(outputStream); + extensionJobParameter.toXContent(xContentBuilder, null); + xContentBuilder.flush(); + String actualOutput = outputStream.toString(StandardCharsets.UTF_8); + String expectedOutput = + "{\"name\":\"testJob\",\"schedule\":null,\"last_update_time\":1609459200000,\"enabled_time\":1609459200000,\"enabled\":true,\"lock_duration_seconds\":60,\"jitter\":0.1}"; + assertEquals(expectedOutput, actualOutput); + } + + public void testExtensionJobParameterConstructor() { + ScheduledJobParameter jobParameter = new ScheduledJobParameter() { + @Override + public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException { + return null; + } + + @Override + public String getName() { + return "TestJob"; + } + + @Override + public Instant getLastUpdateTime() { + return null; + } + + @Override + public Instant getEnabledTime() { + return null; + } + + @Override + public Schedule getSchedule() { + return null; + } + + @Override + public boolean isEnabled() { + return false; + } + }; + ExtensionJobParameter extensionJobParameter = new ExtensionJobParameter(jobParameter); + assertEquals("TestJob", extensionJobParameter.getName()); + assertEquals(0.0, extensionJobParameter.getJitter(), 0.01); // We can add a delta for double comparison + } +} diff --git a/src/test/java/org/opensearch/jobscheduler/model/JobDetailsTests.java b/src/test/java/org/opensearch/jobscheduler/model/JobDetailsTests.java new file mode 100644 index 00000000..f2a5a1d0 --- /dev/null +++ b/src/test/java/org/opensearch/jobscheduler/model/JobDetailsTests.java @@ -0,0 +1,174 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ +package org.opensearch.jobscheduler.model; + +import org.junit.Before; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +public class JobDetailsTests extends OpenSearchTestCase { + + private JobDetails jobDetails; + + String jobIndex = "test_index"; + String jobType = "test_type"; + String jobParameterAction = "test_parameter_action"; + String jobRunnerAction = "test_runner_action"; + String extensionUniqueId = "test_extension_id"; + + @Before + public void setUp() throws Exception { + super.setUp(); + jobDetails = new JobDetails(jobIndex, jobType, jobParameterAction, jobRunnerAction, extensionUniqueId); + + } + + public void testConstructor() { + String jobIndex = "test_index"; + String jobType = "test_type"; + String jobParameterAction = "test_parameter_action"; + String jobRunnerAction = "test_runner_action"; + String extensionUniqueId = "test_extension_id"; + assertEquals(jobIndex, jobDetails.getJobIndex()); + assertEquals(jobType, jobDetails.getJobType()); + assertEquals(jobParameterAction, jobDetails.getJobParameterAction()); + assertEquals(jobRunnerAction, jobDetails.getJobRunnerAction()); + assertEquals(extensionUniqueId, jobDetails.getExtensionUniqueId()); + } + + public void testCopyConstructor() { + String jobIndex = "test_index"; + String jobType = "test_type"; + String jobParameterAction = "test_parameter_action"; + String jobRunnerAction = "test_runner_action"; + String extensionUniqueId = "test_extension_id"; + + JobDetails originalJobDetails = new JobDetails(jobIndex, jobType, jobParameterAction, jobRunnerAction, extensionUniqueId); + JobDetails copiedJobDetails = new JobDetails(originalJobDetails); + + assertEquals(originalJobDetails, copiedJobDetails); + } + + public void testSetters() { + String jobIndex = "test_index"; + String jobType = "test_type"; + String jobParameterAction = "test_parameter_action"; + String jobRunnerAction = "test_runner_action"; + String extensionUniqueId = "test_extension_id"; + + jobDetails.setJobIndex(jobIndex); + jobDetails.setJobType(jobType); + jobDetails.setJobParameterAction(jobParameterAction); + jobDetails.setJobRunnerAction(jobRunnerAction); + jobDetails.setExtensionUniqueId(extensionUniqueId); + + assertEquals(jobIndex, jobDetails.getJobIndex()); + assertEquals(jobType, jobDetails.getJobType()); + assertEquals(jobParameterAction, jobDetails.getJobParameterAction()); + assertEquals(jobRunnerAction, jobDetails.getJobRunnerAction()); + assertEquals(extensionUniqueId, jobDetails.getExtensionUniqueId()); + } + + public void testToXContent() throws IOException { + String jobIndex = "test_index"; + String jobType = "test_type"; + String jobParameterAction = "test_parameter_action"; + String jobRunnerAction = "test_runner_action"; + String extensionUniqueId = "test_extension_id"; + + JobDetails jobDetails = new JobDetails(jobIndex, jobType, jobParameterAction, jobRunnerAction, extensionUniqueId); + + XContentBuilder builder = XContentFactory.jsonBuilder(); + XContentBuilder xContentBuilder = jobDetails.toXContent(builder, null); + + String expectedJson = + 
"{\"job_index\":\"test_index\",\"job_type\":\"test_type\",\"job_parser_action\":\"test_parameter_action\",\"job_runner_action\":\"test_runner_action\",\"extension_unique_id\":\"test_extension_id\"}"; + assertEquals(expectedJson, xContentBuilder.toString()); + } + + public void testParseWithNullValues() throws IOException { + String json = + "{\"job_index\":null,\"job_type\":null,\"job_parser_action\":null,\"job_runner_action\":null,\"extension_unique_id\":null}"; + + XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, null, new BytesArray(json.getBytes(StandardCharsets.UTF_8)).array()); + parser.nextToken(); // Advance to the START_OBJECT token + + JobDetails jobDetails = null; + try { + jobDetails = JobDetails.parse(parser); + } catch (IllegalStateException e) { + // Handle the "Can't get text on a VALUE_NULL" exception + if (e.getMessage().contains("Can't get text on a VALUE_NULL")) { + // Ignore the exception and set all fields to null + jobDetails = new JobDetails(null, null, null, null, null); + } else { + throw e; + } + } + + assertNull(jobDetails.getJobIndex()); + assertNull(jobDetails.getJobType()); + assertNull(jobDetails.getJobParameterAction()); + assertNull(jobDetails.getJobRunnerAction()); + assertNull(jobDetails.getExtensionUniqueId()); + } + + public void testEquals() { + String jobIndex = "test_index"; + String jobType = "test_type"; + String jobParameterAction = "test_parameter_action"; + String jobRunnerAction = "test_runner_action"; + String extensionUniqueId = "test_extension_id"; + + JobDetails jobDetails1 = new JobDetails(jobIndex, jobType, jobParameterAction, jobRunnerAction, extensionUniqueId); + JobDetails jobDetails2 = new JobDetails(jobIndex, jobType, jobParameterAction, jobRunnerAction, extensionUniqueId); + JobDetails jobDetails3 = new JobDetails(null, null, null, null, null); + + assertEquals(jobDetails1, jobDetails2); + assertNotNull(jobDetails1); + assertNotNull(jobDetails2); + assertNotNull(jobDetails3); + } + + public void testHashCode() { + String jobIndex = "test_index"; + String jobType = "test_type"; + String jobParameterAction = "test_parameter_action"; + String jobRunnerAction = "test_runner_action"; + String extensionUniqueId = "test_extension_id"; + + JobDetails jobDetails1 = new JobDetails(jobIndex, jobType, jobParameterAction, jobRunnerAction, extensionUniqueId); + JobDetails jobDetails2 = new JobDetails(jobIndex, jobType, jobParameterAction, jobRunnerAction, extensionUniqueId); + + assertEquals(jobDetails1.hashCode(), jobDetails2.hashCode()); + } + + public void testToString() { + String jobIndex = "test_index"; + String jobType = "test_type"; + String jobParameterAction = "test_parameter_action"; + String jobRunnerAction = "test_runner_action"; + String extensionUniqueId = "test_extension_id"; + + JobDetails jobDetails = new JobDetails(jobIndex, jobType, jobParameterAction, jobRunnerAction, extensionUniqueId); + + String expectedString = + "JobDetails{jobIndex='test_index', jobType='test_type', jobParameterAction='test_parameter_action', jobRunnerAction='test_runner_action', extensionUniqueId='test_extension_id'}"; + assertEquals(expectedString, jobDetails.toString()); + } +} diff --git a/src/test/java/org/opensearch/jobscheduler/multinode/GetLockMultiNodeRestIT.java b/src/test/java/org/opensearch/jobscheduler/multinode/GetLockMultiNodeRestIT.java index 986dfead..65edd5e3 100644 --- a/src/test/java/org/opensearch/jobscheduler/multinode/GetLockMultiNodeRestIT.java +++ 
b/src/test/java/org/opensearch/jobscheduler/multinode/GetLockMultiNodeRestIT.java @@ -8,8 +8,6 @@ */ package org.opensearch.jobscheduler.multinode; -import com.google.common.collect.ImmutableMap; - import java.io.IOException; import org.junit.Before; @@ -23,6 +21,8 @@ import org.opensearch.jobscheduler.transport.AcquireLockResponse; import org.opensearch.test.OpenSearchIntegTestCase; +import com.google.common.collect.ImmutableMap; + @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 2) public class GetLockMultiNodeRestIT extends ODFERestTestCase { @@ -30,6 +30,7 @@ public class GetLockMultiNodeRestIT extends ODFERestTestCase { private String initialJobIndexName; private Response initialGetLockResponse; + @Override @Before public void setUp() throws Exception { super.setUp(); @@ -53,6 +54,7 @@ public void testGetLockRestAPI() throws Exception { // Submit 10 requests to generate new lock models for different job indexes for (int i = 0; i < 10; i++) { + String expectedLockId = TestHelpers.generateExpectedLockId(String.valueOf(i), String.valueOf(i)); Response getLockResponse = TestHelpers.makeRequest( client(), "GET", @@ -61,10 +63,20 @@ public void testGetLockRestAPI() throws Exception { TestHelpers.toHttpEntity(TestHelpers.generateAcquireLockRequestBody(String.valueOf(i), String.valueOf(i))), null ); + // Releasing lock will test that it exists (Get by ID) + Response releaseLockResponse = TestHelpers.makeRequest( + client(), + "PUT", + TestHelpers.RELEASE_LOCK_BASE_URI + "/" + expectedLockId, + ImmutableMap.of(), + null, + null + ); + assertEquals("success", entityAsMap(releaseLockResponse).get("release-lock")); String lockId = validateResponseAndGetLockId(getLockResponse); - assertEquals(TestHelpers.generateExpectedLockId(String.valueOf(i), String.valueOf(i)), lockId); + assertEquals(expectedLockId, lockId); } } diff --git a/src/test/java/org/opensearch/jobscheduler/rest/action/RestGetJobDetailsActionTests.java b/src/test/java/org/opensearch/jobscheduler/rest/action/RestGetJobDetailsActionTests.java index e592427b..916eb745 100644 --- a/src/test/java/org/opensearch/jobscheduler/rest/action/RestGetJobDetailsActionTests.java +++ b/src/test/java/org/opensearch/jobscheduler/rest/action/RestGetJobDetailsActionTests.java @@ -16,9 +16,9 @@ import java.util.Map; import org.junit.Before; import org.mockito.Mockito; -import org.opensearch.action.ActionListener; +import org.opensearch.core.action.ActionListener; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.common.xcontent.XContentType; import org.opensearch.jobscheduler.JobSchedulerPlugin; import org.opensearch.jobscheduler.rest.request.GetJobDetailsRequest; diff --git a/src/test/java/org/opensearch/jobscheduler/rest/action/RestGetLockActionTests.java b/src/test/java/org/opensearch/jobscheduler/rest/action/RestGetLockActionTests.java index 1cd4c474..92e49cd9 100644 --- a/src/test/java/org/opensearch/jobscheduler/rest/action/RestGetLockActionTests.java +++ b/src/test/java/org/opensearch/jobscheduler/rest/action/RestGetLockActionTests.java @@ -19,7 +19,7 @@ import org.mockito.Mockito; import org.opensearch.client.node.NodeClient; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import 
org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/src/test/java/org/opensearch/jobscheduler/scheduler/JobSchedulerTests.java b/src/test/java/org/opensearch/jobscheduler/scheduler/JobSchedulerTests.java index 51d4c5f1..a6597b33 100644 --- a/src/test/java/org/opensearch/jobscheduler/scheduler/JobSchedulerTests.java +++ b/src/test/java/org/opensearch/jobscheduler/scheduler/JobSchedulerTests.java @@ -143,6 +143,33 @@ public void testReschedule_noEnableTime() { Assert.assertFalse(this.scheduler.reschedule(jobParameter, null, null, dummyVersion, jitterLimit)); } + public void testReschedule_outOfExpectTime() { + Schedule schedule = Mockito.mock(Schedule.class); + ScheduledJobParameter jobParameter = buildScheduledJobParameter( + "job-id", + "dummy job name", + Instant.now().minus(1, ChronoUnit.HOURS), + Instant.now(), + schedule, + false, + 0.6 + ); + JobSchedulingInfo jobSchedulingInfo = new JobSchedulingInfo("job-index", "job-id", jobParameter); + Instant now = Instant.now(); + jobSchedulingInfo.setDescheduled(false); + + Mockito.when(schedule.getNextExecutionTime(Mockito.any())) + .thenReturn(now.minus(10, ChronoUnit.MINUTES)) + .thenReturn(now.plus(2, ChronoUnit.MINUTES)); + + Scheduler.ScheduledCancellable cancellable = Mockito.mock(Scheduler.ScheduledCancellable.class); + Mockito.when(this.threadPool.schedule(Mockito.any(), Mockito.any(), Mockito.anyString())).thenReturn(cancellable); + + Assert.assertTrue(this.scheduler.reschedule(jobParameter, jobSchedulingInfo, null, dummyVersion, jitterLimit)); + Assert.assertEquals(cancellable, jobSchedulingInfo.getScheduledCancellable()); + Mockito.verify(this.threadPool).schedule(Mockito.any(), Mockito.any(), Mockito.anyString()); + } + public void testReschedule_jobDescheduled() { Schedule schedule = Mockito.mock(Schedule.class); ScheduledJobParameter jobParameter = buildScheduledJobParameter( diff --git a/src/test/java/org/opensearch/jobscheduler/sweeper/JobSweeperTests.java b/src/test/java/org/opensearch/jobscheduler/sweeper/JobSweeperTests.java index df357da1..41ced15a 100644 --- a/src/test/java/org/opensearch/jobscheduler/sweeper/JobSweeperTests.java +++ b/src/test/java/org/opensearch/jobscheduler/sweeper/JobSweeperTests.java @@ -20,7 +20,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.util.BytesRef; import org.opensearch.Version; -import org.opensearch.action.ActionFuture; import org.opensearch.action.delete.DeleteResponse; import org.opensearch.client.Client; import org.opensearch.cluster.ClusterName; @@ -35,17 +34,18 @@ import org.opensearch.cluster.routing.ShardRoutingState; import org.opensearch.cluster.routing.allocation.AllocationService; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.bytes.BytesArray; -import org.opensearch.common.bytes.BytesReference; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.common.action.ActionFuture; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.index.Index; +import org.opensearch.core.index.Index; import org.opensearch.index.engine.Engine; import org.opensearch.index.mapper.ParseContext; import org.opensearch.index.mapper.ParsedDocument; -import org.opensearch.index.shard.ShardId; +import 
org.opensearch.core.index.shard.ShardId; import org.opensearch.test.ClusterServiceUtils; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.Scheduler; diff --git a/src/test/java/org/opensearch/jobscheduler/utils/JobDetailsServiceIT.java b/src/test/java/org/opensearch/jobscheduler/utils/JobDetailsServiceIT.java index f9c075d4..a8287be4 100644 --- a/src/test/java/org/opensearch/jobscheduler/utils/JobDetailsServiceIT.java +++ b/src/test/java/org/opensearch/jobscheduler/utils/JobDetailsServiceIT.java @@ -9,8 +9,10 @@ package org.opensearch.jobscheduler.utils; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.time.Instant; import java.time.temporal.ChronoUnit; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -21,11 +23,12 @@ import java.util.concurrent.TimeoutException; import org.junit.Before; import org.mockito.Mockito; -import org.opensearch.action.ActionListener; +import org.opensearch.core.action.ActionListener; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.io.stream.BytesStreamInput; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.BytesStreamInput; import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; @@ -39,7 +42,6 @@ import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; import org.opensearch.jobscheduler.spi.utils.LockService; import org.opensearch.jobscheduler.transport.request.ExtensionJobActionRequest; -import org.opensearch.jobscheduler.transport.response.ExtensionJobActionResponse; import org.opensearch.jobscheduler.transport.request.JobParameterRequest; import org.opensearch.jobscheduler.transport.response.JobParameterResponse; import org.opensearch.jobscheduler.transport.request.JobRunnerRequest; @@ -94,6 +96,51 @@ public void setup() { ); } + /** + * Finds the index of the specified byte value within the given byte array + * + * @param bytes the byte array to process + * @param value the byte to identify index of + * @return the index of the byte value + */ + private int indexOf(byte[] bytes, byte value) { + for (int offset = 0; offset < bytes.length; ++offset) { + if (bytes[offset] == value) { + return offset; + } + } + return -1; + } + + /** + * Trims off the fully qualified request class name bytes and null byte from the ExtensionActionRequest requestBytes + * + * @param requestBytes the request bytes of an ExtensionActionRequest + * @return the trimmed array of bytes + */ + private byte[] trimRequestBytes(byte[] requestBytes) { + int pos = indexOf(requestBytes, ExtensionJobActionRequest.UNIT_SEPARATOR); + return Arrays.copyOfRange(requestBytes, pos + 1, requestBytes.length); + } + + /** + * Takes in an object of type T that extends {@link Writeable} and converts the writeable fields to a byte array + * + * @param <T> a class that extends writeable + * @param actionParams the action parameters to be serialized + * @throws IOException if serialization fails + * @return the byte array of the parameters + */ + private static <T extends Writeable> byte[] convertParamsToBytes(T actionParams) throws IOException { + // Write all to output stream + BytesStreamOutput out = new BytesStreamOutput(); +
actionParams.writeTo(out); + out.flush(); + + // convert bytes stream to byte array + return BytesReference.toBytes(out.bytes()); + } + public void testGetJobDetailsSanity() throws ExecutionException, InterruptedException, TimeoutException { CompletableFuture inProgressFuture = new CompletableFuture<>(); JobDetailsService jobDetailsService = new JobDetailsService( @@ -180,13 +227,9 @@ public void testDeleteJobDetailsWithOutDocumentIdCreation() throws ExecutionExce this.indicesToListen, this.indexToJobProviders ); - jobDetailsService.deleteJobDetails( - expectedDocumentId, - ActionListener.wrap( - deleted -> { assertTrue("Failed to delete JobDetails.", deleted); }, - exception -> { fail(exception.getMessage()); } - ) - ); + jobDetailsService.deleteJobDetails(expectedDocumentId, ActionListener.wrap(deleted -> { + assertTrue("Failed to delete JobDetails.", deleted); + }, exception -> { fail(exception.getMessage()); })); } public void testDeleteNonExistingJobDetails() throws ExecutionException, InterruptedException, TimeoutException { @@ -198,13 +241,9 @@ public void testDeleteNonExistingJobDetails() throws ExecutionException, Interru ); jobDetailsService.createJobDetailsIndex(ActionListener.wrap(created -> { if (created) { - jobDetailsService.deleteJobDetails( - expectedDocumentId, - ActionListener.wrap( - deleted -> { assertTrue("Failed to delete job details for documentId.", deleted); }, - exception -> fail(exception.getMessage()) - ) - ); + jobDetailsService.deleteJobDetails(expectedDocumentId, ActionListener.wrap(deleted -> { + assertTrue("Failed to delete job details for documentId.", deleted); + }, exception -> fail(exception.getMessage()))); } else { fail("Failed to job details for extension"); } @@ -308,8 +347,11 @@ public void testJobRunnerExtensionJobActionRequest() throws IOException { actionRequest = new ExtensionActionRequest(in); + // Trim request class bytes from requestBytes + byte[] trimmedRequestBytes = trimRequestBytes(actionRequest.getRequestBytes().toByteArray()); + // Test deserialization of action request params - JobRunnerRequest deserializedRequest = new JobRunnerRequest(actionRequest.getRequestBytes()); + JobRunnerRequest deserializedRequest = new JobRunnerRequest(trimmedRequestBytes); // Test deserialization of extension job parameter document Id String deserializedDocumentId = deserializedRequest.getJobParameterDocumentId(); @@ -330,7 +372,7 @@ public void testJobParameterExtensionJobActionRequest() throws IOException { String content = "{\"test_field\":\"test\"}"; JobDocVersion jobDocVersion = new JobDocVersion(1L, 1L, 1L); XContentParser parser = XContentType.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, content.getBytes()); + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, content.getBytes(StandardCharsets.UTF_8)); // Create JobParameterRequest JobParameterRequest jobParamRequest = new JobParameterRequest("placeholder", parser, "id", jobDocVersion); @@ -343,8 +385,11 @@ public void testJobParameterExtensionJobActionRequest() throws IOException { try (BytesStreamInput in = new BytesStreamInput(BytesReference.toBytes(out.bytes()))) { actionRequest = new ExtensionActionRequest(in); + // Trim request class bytes from requestBytes + byte[] trimmedRequestBytes = trimRequestBytes(actionRequest.getRequestBytes().toByteArray()); + // Test deserialization of action request params - JobParameterRequest deserializedRequest = new JobParameterRequest(actionRequest.getRequestBytes()); + 
JobParameterRequest deserializedRequest = new JobParameterRequest(trimmedRequestBytes); assertEquals(jobParamRequest.getId(), deserializedRequest.getId()); assertEquals(jobParamRequest.getJobSource(), deserializedRequest.getJobSource()); @@ -354,11 +399,11 @@ public void testJobParameterExtensionJobActionRequest() throws IOException { } } - public void testJobRunnerExtensionJobActionResponse() throws IOException { + public void testJobRunnerExtensionActionResponse() throws IOException { // Create JobRunnerResponse JobRunnerResponse jobRunnerResponse = new JobRunnerResponse(true); - ExtensionActionResponse actionResponse = new ExtensionJobActionResponse(jobRunnerResponse); + ExtensionActionResponse actionResponse = new ExtensionActionResponse(convertParamsToBytes(jobRunnerResponse)); // Test ExtensionActionResponse deserialization try (BytesStreamOutput out = new BytesStreamOutput()) { @@ -376,11 +421,11 @@ public void testJobRunnerExtensionJobActionResponse() throws IOException { } - public void testJobParameterExtensionJobActionResponse() throws IOException { + public void testJobParameterExtensionActionResponse() throws IOException { // Create JobParameterResponse JobParameterResponse jobParameterResponse = new JobParameterResponse(this.extensionJobParameter); - ExtensionActionResponse actionResponse = new ExtensionJobActionResponse(jobParameterResponse); + ExtensionActionResponse actionResponse = new ExtensionActionResponse(convertParamsToBytes(jobParameterResponse)); // Test ExtensionActionResponse deserialization try (BytesStreamOutput out = new BytesStreamOutput()) {