From 1dd231f48d8941c03693ae9388256c41370a23fd Mon Sep 17 00:00:00 2001 From: Joshua Palis Date: Thu, 29 Dec 2022 15:35:56 -0800 Subject: [PATCH] Configuring spotlessApply for Job Scheduler (#291) * Configuring spotlessApply for Job Scheduler Signed-off-by: Joshua Palis * SpotlessApply Signed-off-by: Joshua Palis Signed-off-by: Joshua Palis Signed-off-by: Prudhvi Godithi --- .codecov.yml | 2 +- .github/ISSUE_TEMPLATE/config.yml | 2 +- .github/workflows/ci.yml | 3 +- .github/workflows/links.yml | 2 +- ADMINS.md | 2 +- CONTRIBUTING.md | 2 +- DEVELOPER_GUIDE.md | 2 +- README.md | 8 +- RELEASING.md | 2 +- SECURITY.md | 2 +- build.gradle | 4 +- formatter/formatterConfig.xml | 362 ++++++++++ formatter/license-header.txt | 8 + gradle/formatting.gradle | 37 + .../SampleExtensionPlugin.java | 43 +- .../SampleExtensionRestHandler.java | 49 +- .../sampleextension/SampleJobParameter.java | 11 +- .../sampleextension/SampleJobRunner.java | 92 +-- .../SampleExtensionIntegTestCase.java | 217 +++--- .../SampleExtensionPluginIT.java | 25 +- .../sampleextension/SampleJobRunnerIT.java | 16 +- .../JobSchedulerBackwardsCompatibilityIT.java | 77 ++- settings.gradle | 2 +- .../jobscheduler/spi/JobDocVersion.java | 18 +- .../jobscheduler/spi/JobExecutionContext.java | 14 +- .../spi/JobSchedulerExtension.java | 5 +- .../jobscheduler/spi/LockModel.java | 66 +- .../spi/ScheduledJobParameter.java | 9 +- .../jobscheduler/spi/ScheduledJobParser.java | 5 +- .../jobscheduler/spi/ScheduledJobRunner.java | 5 +- .../spi/schedule/CronSchedule.java | 41 +- .../spi/schedule/IntervalSchedule.java | 56 +- .../jobscheduler/spi/schedule/Schedule.java | 7 +- .../spi/schedule/ScheduleParser.java | 35 +- .../jobscheduler/spi/utils/LockService.java | 261 ++++--- .../utils/opensearch_job_scheduler_lock.json | 2 +- .../spi/schedule/CronScheduleTests.java | 37 +- .../spi/schedule/IntervalScheduleTests.java | 59 +- .../spi/schedule/JobDocVersionTests.java | 5 +- .../spi/schedule/ScheduleParserTests.java | 25 +- .../jobscheduler/spi/utils/LockServiceIT.java | 654 ++++++++---------- .../jobscheduler/JobSchedulerPlugin.java | 72 +- .../jobscheduler/JobSchedulerSettings.java | 53 +- .../LegacyOpenDistroJobSchedulerSettings.java | 61 +- .../jobscheduler/ScheduledJobProvider.java | 5 +- .../jobscheduler/scheduler/JobScheduler.java | 45 +- .../scheduler/JobSchedulingInfo.java | 5 +- .../scheduler/ScheduledJobInfo.java | 15 +- .../jobscheduler/sweeper/JobSweeper.java | 179 +++-- .../jobscheduler/utils/VisibleForTesting.java | 5 +- .../jobscheduler/JobSchedulerPluginIT.java | 22 +- .../JobSchedulerPluginRestIT.java | 21 +- .../JobSchedulerSettingsTests.java | 54 +- .../scheduler/JobSchedulerTests.java | 78 ++- .../jobscheduler/sweeper/JobSweeperTests.java | 142 ++-- 55 files changed, 1912 insertions(+), 1119 deletions(-) create mode 100644 formatter/formatterConfig.xml create mode 100644 formatter/license-header.txt create mode 100644 gradle/formatting.gradle diff --git a/.codecov.yml b/.codecov.yml index e711a04d..00f1a267 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -9,4 +9,4 @@ coverage: project: default: target: 75% # the required coverage value - threshold: 1% # the leniency in hitting the target \ No newline at end of file + threshold: 1% # the leniency in hitting the target diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index a8199a10..67f6e2a3 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -4,4 +4,4 @@ contact_links: about: Please ask and answer questions here. 
- name: AWS/Amazon Security url: https://aws.amazon.com/security/vulnerability-reporting/ - about: Please report security vulnerabilities here. \ No newline at end of file + about: Please report security vulnerabilities here. diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5cad559f..253910f2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -33,9 +33,8 @@ jobs: - name: Publish to Maven Local run: | ./gradlew publishToMavenLocal - + - name: Upload Coverage Report uses: codecov/codecov-action@v1 with: token: ${{ secrets.CODECOV_TOKEN }} - diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml index 67bbc97d..1eacb7b3 100644 --- a/.github/workflows/links.yml +++ b/.github/workflows/links.yml @@ -18,4 +18,4 @@ jobs: env: GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - name: Fail if there were link errors - run: exit ${{ steps.lychee.outputs.exit_code }} \ No newline at end of file + run: exit ${{ steps.lychee.outputs.exit_code }} diff --git a/ADMINS.md b/ADMINS.md index 534cc1be..81f320c5 100644 --- a/ADMINS.md +++ b/ADMINS.md @@ -4,4 +4,4 @@ | --------------- | --------------------------------------- | ----------- | | Henri Yandell | [hyandell](https://github.com/hyandell) | Amazon | -[This document](https://github.com/opensearch-project/.github/blob/main/ADMINS.md) explains what admins do in this repo. and how they should be doing it. If you're interested in becoming a maintainer, see [MAINTAINERS](MAINTAINERS.md). If you're interested in contributing, see [CONTRIBUTING](CONTRIBUTING.md). \ No newline at end of file +[This document](https://github.com/opensearch-project/.github/blob/main/ADMINS.md) explains what admins do in this repo. and how they should be doing it. If you're interested in becoming a maintainer, see [MAINTAINERS](MAINTAINERS.md). If you're interested in contributing, see [CONTRIBUTING](CONTRIBUTING.md). diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 24338dda..c98e32bf 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,4 +1,4 @@ # Contributing to this project OpenSearch is a community project that is built and maintained by people just like **you**. -[This document](https://github.com/opensearch-project/.github/blob/main/CONTRIBUTING.md) explains how you can contribute to this and related projects. \ No newline at end of file +[This document](https://github.com/opensearch-project/.github/blob/main/CONTRIBUTING.md) explains how you can contribute to this and related projects. diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index b19599e5..7d931d42 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -80,4 +80,4 @@ See [CONTRIBUTING](CONTRIBUTING.md). ### Backport -- [Link to backport documentation](https://github.com/opensearch-project/opensearch-plugins/blob/main/BACKPORT.md) \ No newline at end of file +- [Link to backport documentation](https://github.com/opensearch-project/opensearch-plugins/blob/main/BACKPORT.md) diff --git a/README.md b/README.md index f957d8b1..883ac02e 100644 --- a/README.md +++ b/README.md @@ -26,16 +26,16 @@ monitoring the shard allocation by calling OpenSearch API and post the output to ## Getting Started Add the following to your plugin to integrate with JobScheduler -1. Gradle +1. 
Gradle * [Extend plugin from `opensearch-job-scheduler`](https://github.com/opensearch-project/job-scheduler/blob/main/sample-extension-plugin/build.gradle#L36) * Add [this workaround](https://github.com/opensearch-project/job-scheduler/blob/main/sample-extension-plugin/build.gradle#L36) to make sure that job-schedular plugin is correctly installed during integration tests. 2. Implement the following classes -* Implement these two interfaces [ScheduledJobParameter](https://github.com/opensearch-project/job-scheduler/blob/main/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobParameter.java#L37) and [ScheduledJobRunner](https://github.com/opensearch-project/job-scheduler/blob/main/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobRunner.java#L43) +* Implement these two interfaces [ScheduledJobParameter](https://github.com/opensearch-project/job-scheduler/blob/main/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobParameter.java#L37) and [ScheduledJobRunner](https://github.com/opensearch-project/job-scheduler/blob/main/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobRunner.java#L43) * Extend class [JobSchedulerExtension](https://github.com/opensearch-project/job-scheduler/blob/main/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionPlugin.java#L12) 3. Add the resource file -* Create `org.opensearch.jobscheduler.spi.JobSchedulerExtension` [file](https://github.com/opensearch-project/job-scheduler/blob/main/sample-extension-plugin/src/main/resources/META-INF/services/org.opensearch.jobscheduler.spi.JobSchedulerExtension#L1). +* Create `org.opensearch.jobscheduler.spi.JobSchedulerExtension` [file](https://github.com/opensearch-project/job-scheduler/blob/main/sample-extension-plugin/src/main/resources/META-INF/services/org.opensearch.jobscheduler.spi.JobSchedulerExtension#L1). ## Contributing @@ -61,4 +61,4 @@ This project is licensed under the [Apache v2.0 License](./LICENSE.txt) ## Copyright -Copyright OpenSearch Contributors. See [NOTICE](NOTICE) for details. \ No newline at end of file +Copyright OpenSearch Contributors. See [NOTICE](NOTICE) for details. diff --git a/RELEASING.md b/RELEASING.md index 6903e716..91263ac4 100644 --- a/RELEASING.md +++ b/RELEASING.md @@ -1 +1 @@ -This project follows the [OpenSearch release process](https://github.com/opensearch-project/.github/blob/main/RELEASING.md). \ No newline at end of file +This project follows the [OpenSearch release process](https://github.com/opensearch-project/.github/blob/main/RELEASING.md). diff --git a/SECURITY.md b/SECURITY.md index e0ecd902..b8629210 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,3 +1,3 @@ ## Reporting a Vulnerability -If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/) or directly via email to aws-security@amazon.com. Please do **not** create a public GitHub issue. \ No newline at end of file +If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/) or directly via email to aws-security@amazon.com. 
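As a minimal sketch of the README "Getting Started" steps shown in the diff above (assuming the JobSchedulerExtension, ScheduledJobRunner and ScheduledJobParser interfaces from this repo's SPI module; MyJobRunner and MyJobParameter are hypothetical placeholders for your own implementations, not classes from this patch):

    package org.example.myplugin;                        // hypothetical extension package

    import org.opensearch.jobscheduler.spi.JobSchedulerExtension;
    import org.opensearch.jobscheduler.spi.ScheduledJobParser;
    import org.opensearch.jobscheduler.spi.ScheduledJobRunner;

    // Registers a job type, the index that stores job documents,
    // a runner, and a parser with the Job Scheduler plugin.
    public class MyJobExtension implements JobSchedulerExtension {

        @Override
        public String getJobType() {
            return "my_job_type";                         // unique job type name
        }

        @Override
        public String getJobIndex() {
            return ".my_plugin_jobs";                     // index holding the job parameter documents
        }

        @Override
        public ScheduledJobRunner getJobRunner() {
            return MyJobRunner.getInstance();             // your ScheduledJobRunner implementation
        }

        @Override
        public ScheduledJobParser getJobParser() {
            // Parse a job document back into your ScheduledJobParameter implementation.
            return (parser, id, jobDocVersion) -> MyJobParameter.parse(parser);
        }
    }

Job Scheduler would then discover the extension through the service file from step 3, i.e. src/main/resources/META-INF/services/org.opensearch.jobscheduler.spi.JobSchedulerExtension containing the single line org.example.myplugin.MyJobExtension.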
Please do **not** create a public GitHub issue. diff --git a/build.gradle b/build.gradle index 5cb5ff13..85b44944 100644 --- a/build.gradle +++ b/build.gradle @@ -26,6 +26,7 @@ buildscript { plugins { id 'nebula.ospackage' version "8.3.0" id 'java-library' + id "com.diffplug.spotless" version "6.12.0" apply false } apply plugin: 'opensearch.opensearchplugin' @@ -33,6 +34,7 @@ apply plugin: 'opensearch.testclusters' apply plugin: 'opensearch.java-rest-test' apply plugin: 'opensearch.pluginzip' apply from: 'build-tools/opensearchplugin-coverage.gradle' +apply from: 'gradle/formatting.gradle' ext { projectSubstitutions = [:] @@ -272,7 +274,7 @@ afterEvaluate { maintainer 'OpenSearch Team ' url 'https://opensearch.org/downloads.html' summary ''' - JobScheduler plugin for OpenSearch. + JobScheduler plugin for OpenSearch. Reference documentation can be found at https://docs-beta.opensearch.org/. '''.stripIndent().replace('\n', ' ').trim() } diff --git a/formatter/formatterConfig.xml b/formatter/formatterConfig.xml new file mode 100644 index 00000000..b0e1eccc --- /dev/null +++ b/formatter/formatterConfig.xml @@ -0,0 +1,362 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/formatter/license-header.txt b/formatter/license-header.txt new file mode 100644 index 00000000..cf0a0684 --- /dev/null +++ b/formatter/license-header.txt @@ -0,0 +1,8 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ diff --git a/gradle/formatting.gradle b/gradle/formatting.gradle new file mode 100644 index 00000000..4b0e899c --- /dev/null +++ b/gradle/formatting.gradle @@ -0,0 +1,37 @@ +allprojects { + project.apply plugin: "com.diffplug.spotless" + spotless { + java { + // Normally this isn't necessary, but we have Java sources in + // non-standard places + target '**/*.java' + + removeUnusedImports() + eclipse().configFile rootProject.file('formatter/formatterConfig.xml') + trimTrailingWhitespace() + endWithNewline(); + + custom 'Refuse wildcard imports', { + // Wildcard imports can't be resolved; fail the build + if (it =~ /\s+import .*\*;/) { + throw new AssertionError("Do not use wildcard imports. 'spotlessApply' cannot resolve this issue.") + } + } + + // See DEVELOPER_GUIDE.md for details of when to enable this. 
+ if (System.getProperty('spotless.paddedcell') != null) { + paddedCell() + } + } + format 'misc', { + target '*.md', '*.gradle', '**/*.json', '**/*.yaml', '**/*.yml', '**/*.svg' + + trimTrailingWhitespace() + endWithNewline() + } + format("license", { + licenseHeaderFile("${rootProject.file("formatter/license-header.txt")}", "package "); + target("src/*/java/**/*.java") + }) + } +} \ No newline at end of file diff --git a/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionPlugin.java b/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionPlugin.java index 5241750c..312db412 100644 --- a/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionPlugin.java +++ b/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionPlugin.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.sampleextension; import org.opensearch.jobscheduler.spi.JobSchedulerExtension; @@ -54,12 +57,19 @@ public class SampleExtensionPlugin extends Plugin implements ActionPlugin, JobSc static final String JOB_INDEX_NAME = ".scheduler_sample_extension"; @Override - public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, ScriptService scriptService, - NamedXContentRegistry xContentRegistry, Environment environment, - NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier) { + public Collection createComponents( + Client client, + ClusterService clusterService, + ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + Environment environment, + NodeEnvironment nodeEnvironment, + NamedWriteableRegistry namedWriteableRegistry, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier repositoriesServiceSupplier + ) { SampleJobRunner jobRunner = SampleJobRunner.getJobRunnerInstance(); jobRunner.setClusterService(clusterService); jobRunner.setThreadPool(threadPool); @@ -117,7 +127,8 @@ public ScheduledJobParser getJobParser() { case SampleJobParameter.JITTER: jobParameter.setJitter(parser.doubleValue()); break; - default: XContentParserUtils.throwUnknownToken(parser.currentToken(), parser.getTokenLocation()); + default: + XContentParserUtils.throwUnknownToken(parser.currentToken(), parser.getTokenLocation()); } } return jobParameter; @@ -125,10 +136,10 @@ public ScheduledJobParser getJobParser() { } private Instant parseInstantValue(XContentParser parser) throws IOException { - if(XContentParser.Token.VALUE_NULL.equals(parser.currentToken())) { + if (XContentParser.Token.VALUE_NULL.equals(parser.currentToken())) { return null; } - if(parser.currentToken().isValue()) { + if (parser.currentToken().isValue()) { return Instant.ofEpochMilli(parser.longValue()); } XContentParserUtils.throwUnknownToken(parser.currentToken(), parser.getTokenLocation()); @@ -136,9 +147,15 @@ private Instant parseInstantValue(XContentParser parser) throws IOException { } @Override - public List getRestHandlers(Settings settings, 
RestController restController, ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, - IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster) { + public List getRestHandlers( + Settings settings, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster + ) { return Collections.singletonList(new SampleExtensionRestHandler()); } } diff --git a/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionRestHandler.java b/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionRestHandler.java index 9a3f0f6d..2a559ec3 100644 --- a/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionRestHandler.java +++ b/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionRestHandler.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.sampleextension; import org.opensearch.action.support.WriteRequest; @@ -32,11 +35,11 @@ * * Users need to provide "id", "index", "job_name", and "interval" parameter to schedule * a job. e.g. - * {@code + * {@code * POST /_plugins/scheduler_sample/watch?id=dashboards-job-id&job_name=watch dashboards index&index=.opensearch_dashboards_1&interval=1 * } * - * creates a job with id "dashboards-job-id" and job name "watch dashboards index", + * creates a job with id "dashboards-job-id" and job name "watch dashboards index", * which logs ".opensearch_dashboards_1" index's shards info every 1 minute * * Users can remove that job by calling @@ -52,10 +55,9 @@ public String getName() { @Override public List routes() { - return Collections.unmodifiableList(Arrays.asList( - new Route(RestRequest.Method.POST, WATCH_INDEX_URI), - new Route(RestRequest.Method.DELETE, WATCH_INDEX_URI) - )); + return Collections.unmodifiableList( + Arrays.asList(new Route(RestRequest.Method.POST, WATCH_INDEX_URI), new Route(RestRequest.Method.DELETE, WATCH_INDEX_URI)) + ); } @Override @@ -71,16 +73,21 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli String jitterString = request.param("jitter"); Double jitter = jitterString != null ? 
Double.parseDouble(jitterString) : null; - if(id == null || indexName ==null) { + if (id == null || indexName == null) { throw new IllegalArgumentException("Must specify id and index parameter"); } - SampleJobParameter jobParameter = new SampleJobParameter(id, jobName, indexName, - new IntervalSchedule(Instant.now(), Integer.parseInt(interval), ChronoUnit.MINUTES), lockDurationSeconds, jitter); - IndexRequest indexRequest = new IndexRequest() - .index(SampleExtensionPlugin.JOB_INDEX_NAME) - .id(id) - .source(jobParameter.toXContent(JsonXContent.contentBuilder(), null)) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + SampleJobParameter jobParameter = new SampleJobParameter( + id, + jobName, + indexName, + new IntervalSchedule(Instant.now(), Integer.parseInt(interval), ChronoUnit.MINUTES), + lockDurationSeconds, + jitter + ); + IndexRequest indexRequest = new IndexRequest().index(SampleExtensionPlugin.JOB_INDEX_NAME) + .id(id) + .source(jobParameter.toXContent(JsonXContent.contentBuilder(), null)) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); return restChannel -> { // index the job parameter @@ -88,10 +95,12 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli @Override public void onResponse(IndexResponse indexResponse) { try { - RestResponse restResponse = new BytesRestResponse(RestStatus.OK, - indexResponse.toXContent(JsonXContent.contentBuilder(), null)); + RestResponse restResponse = new BytesRestResponse( + RestStatus.OK, + indexResponse.toXContent(JsonXContent.contentBuilder(), null) + ); restChannel.sendResponse(restResponse); - } catch(IOException e) { + } catch (IOException e) { restChannel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); } } @@ -105,9 +114,7 @@ public void onFailure(Exception e) { } else if (request.method().equals(RestRequest.Method.DELETE)) { // delete job parameter doc from index String id = request.param("id"); - DeleteRequest deleteRequest = new DeleteRequest() - .index(SampleExtensionPlugin.JOB_INDEX_NAME) - .id(id); + DeleteRequest deleteRequest = new DeleteRequest().index(SampleExtensionPlugin.JOB_INDEX_NAME).id(id); return restChannel -> { client.delete(deleteRequest, new ActionListener() { diff --git a/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobParameter.java b/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobParameter.java index f0601cdb..c2280572 100644 --- a/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobParameter.java +++ b/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobParameter.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
*/ - package org.opensearch.jobscheduler.sampleextension; import org.opensearch.jobscheduler.spi.ScheduledJobParameter; @@ -39,8 +42,7 @@ public class SampleJobParameter implements ScheduledJobParameter { private Long lockDurationSeconds; private Double jitter; - public SampleJobParameter() { - } + public SampleJobParameter() {} public SampleJobParameter(String id, String name, String indexToWatch, Schedule schedule, Long lockDurationSeconds, Double jitter) { this.jobName = name; @@ -85,7 +87,8 @@ public Long getLockDurationSeconds() { return this.lockDurationSeconds; } - @Override public Double getJitter() { + @Override + public Double getJitter() { return jitter; } diff --git a/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobRunner.java b/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobRunner.java index 081f1241..baef15d8 100644 --- a/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobRunner.java +++ b/sample-extension-plugin/src/main/java/org/opensearch/jobscheduler/sampleextension/SampleJobRunner.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.sampleextension; import org.opensearch.action.index.IndexRequest; @@ -44,11 +47,11 @@ public class SampleJobRunner implements ScheduledJobRunner { private static SampleJobRunner INSTANCE; public static SampleJobRunner getJobRunnerInstance() { - if(INSTANCE != null) { + if (INSTANCE != null) { return INSTANCE; } synchronized (SampleJobRunner.class) { - if(INSTANCE != null) { + if (INSTANCE != null) { return INSTANCE; } INSTANCE = new SampleJobRunner(); @@ -67,21 +70,24 @@ private SampleJobRunner() { public void setClusterService(ClusterService clusterService) { this.clusterService = clusterService; } + public void setThreadPool(ThreadPool threadPool) { this.threadPool = threadPool; } + public void setClient(Client client) { this.client = client; } @Override public void runJob(ScheduledJobParameter jobParameter, JobExecutionContext context) { - if(!(jobParameter instanceof SampleJobParameter)) { - throw new IllegalStateException("Job parameter is not instance of SampleJobParameter, type: " - + jobParameter.getClass().getCanonicalName()); + if (!(jobParameter instanceof SampleJobParameter)) { + throw new IllegalStateException( + "Job parameter is not instance of SampleJobParameter, type: " + jobParameter.getClass().getCanonicalName() + ); } - if(this.clusterService == null) { + if (this.clusterService == null) { throw new IllegalStateException("ClusterService is not initialized."); } @@ -93,39 +99,36 @@ public void runJob(ScheduledJobParameter jobParameter, JobExecutionContext conte Runnable runnable = () -> { if (jobParameter.getLockDurationSeconds() != null) { - lockService.acquireLock(jobParameter, context, ActionListener.wrap( - lock -> { - if (lock == null) { - return; - } - - SampleJobParameter parameter = (SampleJobParameter) jobParameter; - StringBuilder msg = new StringBuilder(); - msg.append("Watching index ").append(parameter.getIndexToWatch()).append("\n"); - - List shardRoutingList = this.clusterService.state() - .routingTable().allShards(parameter.getIndexToWatch()); - for(ShardRouting shardRouting : shardRoutingList) { - 
msg.append(shardRouting.shardId().getId()).append("\t").append(shardRouting.currentNodeId()).append("\t") - .append(shardRouting.active() ? "active" : "inactive").append("\n"); - } - log.info(msg.toString()); - runTaskForIntegrationTests(parameter); - runTaskForLockIntegrationTests(parameter); - - lockService.release(lock, ActionListener.wrap( - released -> { - log.info("Released lock for job {}", jobParameter.getName()); - }, - exception -> { - throw new IllegalStateException("Failed to release lock."); - } - )); - }, - exception -> { - throw new IllegalStateException("Failed to acquire lock."); - } - )); + lockService.acquireLock(jobParameter, context, ActionListener.wrap(lock -> { + if (lock == null) { + return; + } + + SampleJobParameter parameter = (SampleJobParameter) jobParameter; + StringBuilder msg = new StringBuilder(); + msg.append("Watching index ").append(parameter.getIndexToWatch()).append("\n"); + + List shardRoutingList = this.clusterService.state().routingTable().allShards(parameter.getIndexToWatch()); + for (ShardRouting shardRouting : shardRoutingList) { + msg.append(shardRouting.shardId().getId()) + .append("\t") + .append(shardRouting.currentNodeId()) + .append("\t") + .append(shardRouting.active() ? "active" : "inactive") + .append("\n"); + } + log.info(msg.toString()); + runTaskForIntegrationTests(parameter); + runTaskForLockIntegrationTests(parameter); + + lockService.release( + lock, + ActionListener.wrap( + released -> { log.info("Released lock for job {}", jobParameter.getName()); }, + exception -> { throw new IllegalStateException("Failed to release lock."); } + ) + ); + }, exception -> { throw new IllegalStateException("Failed to acquire lock."); })); } }; @@ -133,12 +136,13 @@ public void runJob(ScheduledJobParameter jobParameter, JobExecutionContext conte } private void runTaskForIntegrationTests(SampleJobParameter jobParameter) { - this.client.index(new IndexRequest(jobParameter.getIndexToWatch()) - .id(UUID.randomUUID().toString()) - .source("{\"message\": \"message\"}", XContentType.JSON)); + this.client.index( + new IndexRequest(jobParameter.getIndexToWatch()).id(UUID.randomUUID().toString()) + .source("{\"message\": \"message\"}", XContentType.JSON) + ); } - private void runTaskForLockIntegrationTests(SampleJobParameter jobParameter) throws InterruptedException { + private void runTaskForLockIntegrationTests(SampleJobParameter jobParameter) throws InterruptedException { if (jobParameter.getName().equals("sample-job-lock-test-it")) { Thread.sleep(180000); } diff --git a/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionIntegTestCase.java b/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionIntegTestCase.java index 7da56135..6e33e44e 100644 --- a/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionIntegTestCase.java +++ b/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionIntegTestCase.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
*/ - package org.opensearch.jobscheduler.sampleextension; import org.apache.http.Header; @@ -44,27 +47,39 @@ protected String createWatcherJobJson(String jobId, String jobParameter) throws return createWatcherJobJsonWithClient(client(), jobId, jobParameter); } - protected SampleJobParameter createWatcherJobWithClient(RestClient client, String jobId, SampleJobParameter jobParameter) throws IOException { + protected SampleJobParameter createWatcherJobWithClient(RestClient client, String jobId, SampleJobParameter jobParameter) + throws IOException { Map params = getJobParameterAsMap(jobId, jobParameter); Response response = makeRequest(client, "POST", SampleExtensionRestHandler.WATCH_INDEX_URI, params, null); - Assert.assertEquals("Unable to create a watcher job", - RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + Assert.assertEquals("Unable to create a watcher job", RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); - Map responseJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, response.getEntity().getContent()).map(); + Map responseJson = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.getEntity().getContent() + ).map(); return getJobParameter(client, responseJson.get("_id").toString()); } protected String createWatcherJobJsonWithClient(RestClient client, String jobId, String jobParameter) throws IOException { - Response response = makeRequest(client, "PUT", - "/" + SampleExtensionPlugin.JOB_INDEX_NAME + "/_doc/" + jobId + "?refresh", - Collections.emptyMap(), - new StringEntity(jobParameter, ContentType.APPLICATION_JSON)); - Assert.assertEquals("Unable to create a watcher job", - RestStatus.CREATED, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + Response response = makeRequest( + client, + "PUT", + "/" + SampleExtensionPlugin.JOB_INDEX_NAME + "/_doc/" + jobId + "?refresh", + Collections.emptyMap(), + new StringEntity(jobParameter, ContentType.APPLICATION_JSON) + ); + Assert.assertEquals( + "Unable to create a watcher job", + RestStatus.CREATED, + RestStatus.fromCode(response.getStatusLine().getStatusCode()) + ); - Map responseJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, response.getEntity().getContent()).map(); + Map responseJson = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.getEntity().getContent() + ).map(); return responseJson.get("_id").toString(); } @@ -73,20 +88,30 @@ protected void deleteWatcherJob(String jobId) throws IOException { } protected void deleteWatcherJobWithClient(RestClient client, String jobId) throws IOException { - Response response = makeRequest(client, "DELETE", SampleExtensionRestHandler.WATCH_INDEX_URI, - Collections.singletonMap("id", jobId), null); + Response response = makeRequest( + client, + "DELETE", + SampleExtensionRestHandler.WATCH_INDEX_URI, + Collections.singletonMap("id", jobId), + null + ); - Assert.assertEquals("Unable to delete a watcher job", - RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + Assert.assertEquals("Unable to delete a watcher job", RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); } - protected Response makeRequest(RestClient client, String method, String endpoint, Map params, - HttpEntity entity, Header... 
headers) throws IOException { + protected Response makeRequest( + RestClient client, + String method, + String endpoint, + Map params, + HttpEntity entity, + Header... headers + ) throws IOException { Request request = new Request(method, endpoint); RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); options.setWarningsHandler(WarningsHandler.PERMISSIVE); - for (Header header: headers) { + for (Header header : headers) { options.addHeader(header.getName(), header.getValue()); } request.setOptions(options.build()); @@ -102,7 +127,7 @@ protected Map getJobParameterAsMap(String jobId, SampleJobParame params.put("id", jobId); params.put("job_name", jobParameter.getName()); params.put("index", jobParameter.getIndexToWatch()); - params.put("interval", String.valueOf(((IntervalSchedule)jobParameter.getSchedule()).getInterval())); + params.put("interval", String.valueOf(((IntervalSchedule) jobParameter.getSchedule()).getInterval())); params.put("lock_duration_seconds", String.valueOf(jobParameter.getLockDurationSeconds())); return params; } @@ -110,30 +135,41 @@ protected Map getJobParameterAsMap(String jobId, SampleJobParame @SuppressWarnings("unchecked") protected SampleJobParameter getJobParameter(RestClient client, String jobId) throws IOException { Request request = new Request("POST", "/" + SampleExtensionPlugin.JOB_INDEX_NAME + "/_search"); - String entity = "{\n" + - " \"query\": {\n" + - " \"match\": {\n" + - " \"_id\": {\n" + - " \"query\": \"" + jobId + "\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + String entity = "{\n" + + " \"query\": {\n" + + " \"match\": {\n" + + " \"_id\": {\n" + + " \"query\": \"" + + jobId + + "\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; request.setJsonEntity(entity); Response response = client.performRequest(request); - Map responseJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, response.getEntity().getContent()).map(); - Map hit = (Map)((List)((Map) responseJson.get("hits")).get("hits")).get(0); + Map responseJson = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.getEntity().getContent() + ).map(); + Map hit = (Map) ((List) ((Map) responseJson.get("hits")).get("hits")).get( + 0 + ); Map jobSource = (Map) hit.get("_source"); SampleJobParameter jobParameter = new SampleJobParameter(); jobParameter.setJobName(jobSource.get("name").toString()); jobParameter.setIndexToWatch(jobSource.get("index_name_to_watch").toString()); - Map jobSchedule = (Map)jobSource.get("schedule"); + Map jobSchedule = (Map) jobSource.get("schedule"); jobParameter.setSchedule( - new IntervalSchedule(Instant.ofEpochMilli(Long.parseLong(((Map)jobSchedule.get("interval")).get("start_time").toString())), - Integer.parseInt(((Map)jobSchedule.get("interval")).get("period").toString()), ChronoUnit.MINUTES)); + new IntervalSchedule( + Instant.ofEpochMilli(Long.parseLong(((Map) jobSchedule.get("interval")).get("start_time").toString())), + Integer.parseInt(((Map) jobSchedule.get("interval")).get("period").toString()), + ChronoUnit.MINUTES + ) + ); jobParameter.setLockDurationSeconds(Long.parseLong(jobSource.get("lock_duration_seconds").toString())); return jobParameter; } @@ -145,9 +181,7 @@ protected String createTestIndex() throws IOException { } protected void createTestIndex(String index) throws IOException { - createIndex(index, Settings.builder() - .put("index.number_of_shards", 2) - .put("index.number_of_replicas", 0).build()); + 
createIndex(index, Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); } protected void deleteTestIndex(String index) throws IOException { @@ -155,16 +189,19 @@ protected void deleteTestIndex(String index) throws IOException { } protected long countRecordsInTestIndex(String index) throws IOException { - String entity = "{\n" + - " \"query\": {\n" + - " \"match_all\": {\n" + - " }\n" + - " }\n" + - "}"; - Response response = makeRequest(client(), "POST", "/" + index + "/_count", - Collections.emptyMap(), new StringEntity(entity, ContentType.APPLICATION_JSON)); - Map responseJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, response.getEntity().getContent()).map(); + String entity = "{\n" + " \"query\": {\n" + " \"match_all\": {\n" + " }\n" + " }\n" + "}"; + Response response = makeRequest( + client(), + "POST", + "/" + index + "/_count", + Collections.emptyMap(), + new StringEntity(entity, ContentType.APPLICATION_JSON) + ); + Map responseJson = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.getEntity().getContent() + ).map(); return Integer.parseInt(responseJson.get("count").toString()); } @@ -172,6 +209,7 @@ protected void waitAndCreateWatcherJob(String prevIndex, String jobId, SampleJob Timer timer = new Timer(); TimerTask timerTask = new TimerTask() { private int timeoutCounter = 0; + @Override public void run() { try { @@ -199,6 +237,7 @@ protected void waitAndDeleteWatcherJob(String prevIndex, String jobId) { Timer timer = new Timer(); TimerTask timerTask = new TimerTask() { private int timeoutCounter = 0; + @Override public void run() { try { @@ -229,20 +268,30 @@ protected long waitAndCountRecords(String index, long waitForInMs) throws Except @SuppressWarnings("unchecked") protected long getLockTimeByJobId(String jobId) throws IOException { - String entity = "{\n" + - " \"query\": {\n" + - " \"match\": {\n" + - " \"job_id\": {\n" + - " \"query\": \"" + jobId + "\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; - Response response = makeRequest(client(), "POST", "/" + ".opendistro-job-scheduler-lock" + "/_search", - Collections.emptyMap(), new StringEntity(entity, ContentType.APPLICATION_JSON)); - Map responseJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, response.getEntity().getContent()).map(); - List> hits = (List>)((Map) responseJson.get("hits")).get("hits"); + String entity = "{\n" + + " \"query\": {\n" + + " \"match\": {\n" + + " \"job_id\": {\n" + + " \"query\": \"" + + jobId + + "\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + Response response = makeRequest( + client(), + "POST", + "/" + ".opendistro-job-scheduler-lock" + "/_search", + Collections.emptyMap(), + new StringEntity(entity, ContentType.APPLICATION_JSON) + ); + Map responseJson = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.getEntity().getContent() + ).map(); + List> hits = (List>) ((Map) responseJson.get("hits")).get("hits"); if (hits.size() == 0) { return 0L; } @@ -252,20 +301,30 @@ protected long getLockTimeByJobId(String jobId) throws IOException { @SuppressWarnings("unchecked") protected boolean doesLockExistByLockTime(long lockTime) throws IOException { - String entity = "{\n" + - " \"query\": {\n" + - " \"match\": {\n" + - " \"lock_time\": {\n" + - " \"query\": " + lockTime + "\n" + - " }\n" 
+ - " }\n" + - " }\n" + - "}"; - Response response = makeRequest(client(), "POST", "/" + ".opendistro-job-scheduler-lock" + "/_search", - Collections.emptyMap(), new StringEntity(entity, ContentType.APPLICATION_JSON)); - Map responseJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, response.getEntity().getContent()).map(); - List> hits = (List>)((Map) responseJson.get("hits")).get("hits"); + String entity = "{\n" + + " \"query\": {\n" + + " \"match\": {\n" + + " \"lock_time\": {\n" + + " \"query\": " + + lockTime + + "\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + Response response = makeRequest( + client(), + "POST", + "/" + ".opendistro-job-scheduler-lock" + "/_search", + Collections.emptyMap(), + new StringEntity(entity, ContentType.APPLICATION_JSON) + ); + Map responseJson = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.getEntity().getContent() + ).map(); + List> hits = (List>) ((Map) responseJson.get("hits")).get("hits"); return hits.size() == 1; } -} \ No newline at end of file +} diff --git a/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionPluginIT.java b/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionPluginIT.java index 3c37a263..4428ee93 100644 --- a/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionPluginIT.java +++ b/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleExtensionPluginIT.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
*/ - package org.opensearch.jobscheduler.sampleextension; import org.opensearch.action.admin.cluster.health.ClusterHealthRequest; @@ -30,14 +33,16 @@ public void testPluginsAreInstalled() { NodesInfoRequest nodesInfoRequest = new NodesInfoRequest(); nodesInfoRequest.addMetric(NodesInfoRequest.Metric.PLUGINS.metricName()); - NodesInfoResponse nodesInfoResponse = OpenSearchIntegTestCase.client().admin().cluster().nodesInfo(nodesInfoRequest) - .actionGet(); - List pluginInfos = nodesInfoResponse.getNodes().stream() - .flatMap((Function>) nodeInfo -> nodeInfo.getInfo(PluginsAndModules.class) - .getPluginInfos().stream()).collect(Collectors.toList()); - Assert.assertTrue(pluginInfos.stream().anyMatch(pluginInfo -> pluginInfo.getName() - .equals("opensearch-job-scheduler"))); - Assert.assertTrue(pluginInfos.stream().anyMatch(pluginInfo -> pluginInfo.getName() - .equals("opensearch-job-scheduler-sample-extension"))); + NodesInfoResponse nodesInfoResponse = OpenSearchIntegTestCase.client().admin().cluster().nodesInfo(nodesInfoRequest).actionGet(); + List pluginInfos = nodesInfoResponse.getNodes() + .stream() + .flatMap( + (Function>) nodeInfo -> nodeInfo.getInfo(PluginsAndModules.class).getPluginInfos().stream() + ) + .collect(Collectors.toList()); + Assert.assertTrue(pluginInfos.stream().anyMatch(pluginInfo -> pluginInfo.getName().equals("opensearch-job-scheduler"))); + Assert.assertTrue( + pluginInfos.stream().anyMatch(pluginInfo -> pluginInfo.getName().equals("opensearch-job-scheduler-sample-extension")) + ); } } diff --git a/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleJobRunnerIT.java b/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleJobRunnerIT.java index aac121e8..9f63e9d5 100644 --- a/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleJobRunnerIT.java +++ b/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/SampleJobRunnerIT.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.sampleextension; import org.junit.Assert; @@ -69,7 +72,8 @@ public void testJobUpdateWithRescheduleJob() throws Exception { String newIndex = createTestIndex(); jobParameter.setIndexToWatch(newIndex); - // wait till the job runner runs for the first time after 1 min & inserts a record into the watched index & then update the job with new params. + // wait till the job runner runs for the first time after 1 min & inserts a record into the watched index & then update the job with + // new params. waitAndCreateWatcherJob(schedJobParameter.getIndexToWatch(), jobId, jobParameter); long actualCount = waitAndCountRecords(newIndex, 130000); @@ -86,7 +90,8 @@ public void testAcquiredLockPreventExecOfTasks() throws Exception { SampleJobParameter jobParameter = new SampleJobParameter(); jobParameter.setJobName("sample-job-lock-test-it"); jobParameter.setIndexToWatch(index); - // ensures that the next job tries to run even before the previous job finished & released its lock. Also look at SampleJobRunner.runTaskForLockIntegrationTests + // ensures that the next job tries to run even before the previous job finished & released its lock. 
Also look at + // SampleJobRunner.runTaskForLockIntegrationTests jobParameter.setSchedule(new IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES)); jobParameter.setLockDurationSeconds(120L); @@ -107,10 +112,11 @@ public void testAcquiredLockPreventExecOfTasks() throws Exception { Assert.assertEquals(1, actualCount); Assert.assertTrue(doesLockExistByLockTime(lockTime)); - // Asserts that the new job ran after 2 mins after the first job lock is released. Hence new record is inserted into the watched index. + // Asserts that the new job ran after 2 mins after the first job lock is released. Hence new record is inserted into the watched + // index. // Also asserts that the old lock is released. actualCount = waitAndCountRecords(index, 130000); Assert.assertEquals(2, actualCount); Assert.assertFalse(doesLockExistByLockTime(lockTime)); } -} \ No newline at end of file +} diff --git a/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/bwc/JobSchedulerBackwardsCompatibilityIT.java b/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/bwc/JobSchedulerBackwardsCompatibilityIT.java index 61566ebf..149b4107 100644 --- a/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/bwc/JobSchedulerBackwardsCompatibilityIT.java +++ b/sample-extension-plugin/src/test/java/org/opensearch/jobscheduler/sampleextension/bwc/JobSchedulerBackwardsCompatibilityIT.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.sampleextension.bwc; import org.junit.Assert; @@ -30,7 +33,7 @@ public void testBackwardsCompatibility() throws Exception { Map response = getAsMap(uri); Map> responseMap = (Map>) response.get("nodes"); - for (Map respValues: responseMap.values()) { + for (Map respValues : responseMap.values()) { List> plugins = (List>) respValues.get("plugins"); List pluginNames = plugins.stream().map(plugin -> plugin.get("name").toString()).collect(Collectors.toList()); @@ -50,13 +53,19 @@ public void testBackwardsCompatibility() throws Exception { * we cannot trigger a call for scheduling watcher job, as the older nodes do not have sample-extension plugin. 
*/ Map responseForOldNode = getAsMap(getPluginUriForMixedCluster("third")); - Map> responseMapForOldNode = (Map>) responseForOldNode.get("nodes"); + Map> responseMapForOldNode = (Map>) responseForOldNode.get( + "nodes" + ); - for (Map respValuesForOldNode: responseMapForOldNode.values()) { + for (Map respValuesForOldNode : responseMapForOldNode.values()) { List> pluginsForOldNode = (List>) respValuesForOldNode.get("plugins"); - List pluginNamesForOldNode = pluginsForOldNode.stream().map(plugin -> plugin.get("name").toString()).collect(Collectors.toList()); - Assert.assertTrue("third".equals(System.getProperty("tests.rest.bwcsuite_round")) || - pluginNamesForOldNode.contains("opendistro-job-scheduler")); + List pluginNamesForOldNode = pluginsForOldNode.stream() + .map(plugin -> plugin.get("name").toString()) + .collect(Collectors.toList()); + Assert.assertTrue( + "third".equals(System.getProperty("tests.rest.bwcsuite_round")) + || pluginNamesForOldNode.contains("opendistro-job-scheduler") + ); } case UPGRADED: /* @@ -79,16 +88,20 @@ private String getPluginUri() { case MIXED: { return getPluginUriForMixedCluster(System.getProperty("tests.rest.bwcsuite_round")); } - case UPGRADED: return "_nodes/plugins"; + case UPGRADED: + return "_nodes/plugins"; } return null; } private String getPluginUriForMixedCluster(String node) { switch (node) { - case "second": return "_nodes/" + CLUSTER_NAME + "-1/plugins"; - case "third": return "_nodes/" + CLUSTER_NAME + "-2/plugins"; - default: return "_nodes/" + CLUSTER_NAME + "-0/plugins"; + case "second": + return "_nodes/" + CLUSTER_NAME + "-1/plugins"; + case "third": + return "_nodes/" + CLUSTER_NAME + "-2/plugins"; + default: + return "_nodes/" + CLUSTER_NAME + "-0/plugins"; } } @@ -96,6 +109,7 @@ private enum ClusterType { OLD, MIXED, UPGRADED; + static ClusterType parse(String value) { switch (value) { case "old_cluster": @@ -120,22 +134,29 @@ private void createBasicWatcherJob() throws Exception { * Thus, failure to schedule the job would mean, backward incompatible changes were made in the serde logic. * & the assert would fail. */ - String jobParameter = - "{" + - "\"name\":\"sample-job-it\"," + - "\"enabled\":true," + - "\"enabled_time\":" + now.toEpochMilli() + ", " + - "\"last_update_time\":" + now.toEpochMilli() + ", " + - "\"schedule\":{" + - "\"interval\":{" + - "\"start_time\":" + now.toEpochMilli() + "," + - "\"period\":1," + - "\"unit\":\"Minutes\"" + - "}" + - "}," + - "\"index_name_to_watch\":\"" + index + "\"," + - "\"lock_duration_seconds\":120" + - "}"; + String jobParameter = "{" + + "\"name\":\"sample-job-it\"," + + "\"enabled\":true," + + "\"enabled_time\":" + + now.toEpochMilli() + + ", " + + "\"last_update_time\":" + + now.toEpochMilli() + + ", " + + "\"schedule\":{" + + "\"interval\":{" + + "\"start_time\":" + + now.toEpochMilli() + + "," + + "\"period\":1," + + "\"unit\":\"Minutes\"" + + "}" + + "}," + + "\"index_name_to_watch\":\"" + + index + + "\"," + + "\"lock_duration_seconds\":120" + + "}"; // Creates a new watcher job. 
String jobId = OpenSearchRestTestCase.randomAlphaOfLength(10); @@ -144,4 +165,4 @@ private void createBasicWatcherJob() throws Exception { long actualCount = waitAndCountRecords(index, 80000); Assert.assertEquals(1, actualCount); } -} \ No newline at end of file +} diff --git a/settings.gradle b/settings.gradle index 5a11a2b5..89529542 100644 --- a/settings.gradle +++ b/settings.gradle @@ -10,4 +10,4 @@ project(":spi").name = rootProject.name + "-spi" include "sample-extension-plugin" project(":sample-extension-plugin").name = rootProject.name + "-sample-extension" -startParameter.excludedTaskNames=["publishPluginZipPublicationToMavenLocal", "publishPluginZipPublicationToStagingRepository"] \ No newline at end of file +startParameter.excludedTaskNames=["publishPluginZipPublicationToMavenLocal", "publishPluginZipPublicationToStagingRepository"] diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/JobDocVersion.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/JobDocVersion.java index 72b7626e..39fedfd0 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/JobDocVersion.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/JobDocVersion.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.spi; import java.util.Locale; @@ -50,10 +53,10 @@ public int compareTo(JobDocVersion v) { if (this.seqNo > v.seqNo) { return 1; } - if(this.primaryTerm < v.primaryTerm) { + if (this.primaryTerm < v.primaryTerm) { return -1; } - if(this.primaryTerm > v.primaryTerm) { + if (this.primaryTerm > v.primaryTerm) { return 1; } return 0; @@ -61,7 +64,12 @@ public int compareTo(JobDocVersion v) { @Override public String toString() { - return String.format(Locale.getDefault(), "{_version: %s, _primary_term: %s, _seq_no: %s}", this.version, - this.primaryTerm, this.seqNo); + return String.format( + Locale.getDefault(), + "{_version: %s, _primary_term: %s, _seq_no: %s}", + this.version, + this.primaryTerm, + this.seqNo + ); } } diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/JobExecutionContext.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/JobExecutionContext.java index 8749674d..42e5b5b6 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/JobExecutionContext.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/JobExecutionContext.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
*/ - package org.opensearch.jobscheduler.spi; import org.opensearch.jobscheduler.spi.utils.LockService; @@ -16,8 +19,13 @@ public class JobExecutionContext { private final String jobIndexName; private final String jobId; - public JobExecutionContext(Instant expectedExecutionTime, JobDocVersion jobVersion, LockService lockService, - String jobIndexName, String jobId) { + public JobExecutionContext( + Instant expectedExecutionTime, + JobDocVersion jobVersion, + LockService lockService, + String jobIndexName, + String jobId + ) { this.expectedExecutionTime = expectedExecutionTime; this.jobVersion = jobVersion; this.lockService = lockService; diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/JobSchedulerExtension.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/JobSchedulerExtension.java index 9992560b..abded740 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/JobSchedulerExtension.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/JobSchedulerExtension.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.spi; /** diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/LockModel.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/LockModel.java index 767d6054..2da7d561 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/LockModel.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/LockModel.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.spi; import org.opensearch.common.Strings; @@ -43,8 +46,7 @@ public final class LockModel implements ToXContentObject { * @param primaryTerm primary term from OpenSearch document. */ public LockModel(final LockModel copyLock, long seqNo, long primaryTerm) { - this(copyLock.jobIndexName, copyLock.jobId, copyLock.lockTime, copyLock.lockDurationSeconds, - copyLock.released, seqNo, primaryTerm); + this(copyLock.jobIndexName, copyLock.jobId, copyLock.lockTime, copyLock.lockDurationSeconds, copyLock.released, seqNo, primaryTerm); } /** @@ -54,8 +56,15 @@ public LockModel(final LockModel copyLock, long seqNo, long primaryTerm) { * @param released boolean flag to indicate if the lock is released */ public LockModel(final LockModel copyLock, final boolean released) { - this(copyLock.jobIndexName, copyLock.jobId, copyLock.lockTime, copyLock.lockDurationSeconds, - released, copyLock.seqNo, copyLock.primaryTerm); + this( + copyLock.jobIndexName, + copyLock.jobId, + copyLock.lockTime, + copyLock.lockDurationSeconds, + released, + copyLock.seqNo, + copyLock.primaryTerm + ); } /** @@ -66,18 +75,31 @@ public LockModel(final LockModel copyLock, final boolean released) { * @param lockDurationSeconds total lock duration in seconds. 
* @param released boolean flag to indicate if the lock is released */ - public LockModel(final LockModel copyLock, - final Instant updateLockTime, final long lockDurationSeconds, final boolean released) { + public LockModel(final LockModel copyLock, final Instant updateLockTime, final long lockDurationSeconds, final boolean released) { this(copyLock.jobIndexName, copyLock.jobId, updateLockTime, lockDurationSeconds, released, copyLock.seqNo, copyLock.primaryTerm); } public LockModel(String jobIndexName, String jobId, Instant lockTime, long lockDurationSeconds, boolean released) { - this(jobIndexName, jobId, lockTime, lockDurationSeconds, released, - SequenceNumbers.UNASSIGNED_SEQ_NO, SequenceNumbers.UNASSIGNED_PRIMARY_TERM); + this( + jobIndexName, + jobId, + lockTime, + lockDurationSeconds, + released, + SequenceNumbers.UNASSIGNED_SEQ_NO, + SequenceNumbers.UNASSIGNED_PRIMARY_TERM + ); } - public LockModel(String jobIndexName, String jobId, Instant lockTime, - long lockDurationSeconds, boolean released, long seqNo, long primaryTerm) { + public LockModel( + String jobIndexName, + String jobId, + Instant lockTime, + long lockDurationSeconds, + boolean released, + long seqNo, + long primaryTerm + ) { this.lockId = jobIndexName + LOCK_ID_DELIMITR + jobId; this.jobIndexName = jobIndexName; // The jobId parameter does not necessarily need to represent the id of a job scheduler job, as it is being used @@ -137,7 +159,8 @@ public static LockModel parse(final XContentParser parser, long seqNo, long prim ); } - @Override public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject() .field(JOB_INDEX_NAME, this.jobIndexName) .field(JOB_ID, this.jobId) @@ -148,7 +171,8 @@ public static LockModel parse(final XContentParser parser, long seqNo, long prim return builder; } - @Override public String toString() { + @Override + public String toString() { return Strings.toString(this, false, true); } @@ -193,14 +217,14 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; LockModel lockModel = (LockModel) o; - return lockDurationSeconds == lockModel.lockDurationSeconds && - released == lockModel.released && - seqNo == lockModel.seqNo && - primaryTerm == lockModel.primaryTerm && - lockId.equals(lockModel.lockId) && - jobIndexName.equals(lockModel.jobIndexName) && - jobId.equals(lockModel.jobId) && - lockTime.equals(lockModel.lockTime); + return lockDurationSeconds == lockModel.lockDurationSeconds + && released == lockModel.released + && seqNo == lockModel.seqNo + && primaryTerm == lockModel.primaryTerm + && lockId.equals(lockModel.lockId) + && jobIndexName.equals(lockModel.jobIndexName) + && jobId.equals(lockModel.jobId) + && lockTime.equals(lockModel.lockTime); } @Override diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/ScheduledJobParameter.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/ScheduledJobParameter.java index 6acdd134..6958ea29 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/ScheduledJobParameter.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/ScheduledJobParameter.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * 
compatible open source license. */ - package org.opensearch.jobscheduler.spi; import org.opensearch.jobscheduler.spi.schedule.Schedule; @@ -61,5 +64,7 @@ default Long getLockDurationSeconds() { * * @return job execution jitter */ - default Double getJitter() {return null;} + default Double getJitter() { + return null; + } } diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/ScheduledJobParser.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/ScheduledJobParser.java index b76d4366..1c056749 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/ScheduledJobParser.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/ScheduledJobParser.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.spi; import org.opensearch.common.xcontent.XContentParser; diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/ScheduledJobRunner.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/ScheduledJobRunner.java index a9b5ae4c..50815cc0 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/ScheduledJobRunner.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/ScheduledJobRunner.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.spi; public interface ScheduledJobRunner { diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/CronSchedule.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/CronSchedule.java index 2b993112..afa7923d 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/CronSchedule.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/CronSchedule.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
*/ - package org.opensearch.jobscheduler.spi.schedule; import com.cronutils.model.CronType; @@ -80,7 +83,9 @@ public String getCronExpression() { return this.expression; } - public Long getDelay() { return this.scheduleDelay; } + public Long getDelay() { + return this.scheduleDelay; + } @Override public Instant getNextExecutionTime(Instant time) { @@ -115,7 +120,9 @@ public Tuple getPeriodStartingAt(Instant startTime) { realStartTime = startTime; } else { Instant now = this.clock.instant(); - Optional lastExecutionTime = this.executionTime.lastExecution(ZonedDateTime.ofInstant(now.minusMillis(delay), this.timezone)); + Optional lastExecutionTime = this.executionTime.lastExecution( + ZonedDateTime.ofInstant(now.minusMillis(delay), this.timezone) + ); if (!lastExecutionTime.isPresent()) { return new Tuple<>(now, now); } @@ -152,22 +159,22 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws private XContentBuilder toXContentNoDelay(XContentBuilder builder) throws IOException { builder.startObject() - .startObject(CRON_FIELD) - .field(EXPRESSION_FIELD, this.expression) - .field(TIMEZONE_FIELD, this.timezone.getId()) - .endObject() - .endObject(); + .startObject(CRON_FIELD) + .field(EXPRESSION_FIELD, this.expression) + .field(TIMEZONE_FIELD, this.timezone.getId()) + .endObject() + .endObject(); return builder; } private XContentBuilder toXContentWithDelay(XContentBuilder builder) throws IOException { builder.startObject() - .startObject(CRON_FIELD) - .field(EXPRESSION_FIELD, this.expression) - .field(TIMEZONE_FIELD, this.timezone.getId()) - .field(DELAY_FIELD, this.scheduleDelay) - .endObject() - .endObject(); + .startObject(CRON_FIELD) + .field(EXPRESSION_FIELD, this.expression) + .field(TIMEZONE_FIELD, this.timezone.getId()) + .field(DELAY_FIELD, this.scheduleDelay) + .endObject() + .endObject(); return builder; } @@ -181,9 +188,9 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CronSchedule cronSchedule = (CronSchedule) o; - return timezone.equals(cronSchedule.timezone) && - expression.equals(cronSchedule.expression) && - Objects.equals(scheduleDelay, cronSchedule.scheduleDelay); + return timezone.equals(cronSchedule.timezone) + && expression.equals(cronSchedule.expression) + && Objects.equals(scheduleDelay, cronSchedule.scheduleDelay); } @Override diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/IntervalSchedule.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/IntervalSchedule.java index 2ad501ca..19e927cb 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/IntervalSchedule.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/IntervalSchedule.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
*/ - package org.opensearch.jobscheduler.spi.schedule; import com.cronutils.utils.VisibleForTesting; @@ -56,8 +59,8 @@ public class IntervalSchedule implements Schedule { public IntervalSchedule(Instant startTime, int interval, ChronoUnit unit) { if (!SUPPORTED_UNITS.contains(unit)) { throw new IllegalArgumentException( - String.format(Locale.ROOT, "Interval unit %s is not supported, expects %s", - unit, SUPPORTED_UNITS)); + String.format(Locale.ROOT, "Interval unit %s is not supported, expects %s", unit, SUPPORTED_UNITS) + ); } this.initialStartTime = startTime; this.startTimeWithDelay = startTime; @@ -95,7 +98,9 @@ public ChronoUnit getUnit() { return this.unit; } - public Long getDelay() { return this.scheduleDelay; } + public Long getDelay() { + return this.scheduleDelay; + } @Override public Instant getNextExecutionTime(Instant time) { @@ -145,7 +150,7 @@ public Boolean runningOnTime(Instant lastExecutionTime) { long expectedLastExecutionTime = now.toEpochMilli() - expectedMillisSinceLastExecution; long expectedCurrentExecutionTime = expectedLastExecutionTime + this.intervalInMillis; return Math.abs(lastExecutionTime.toEpochMilli() - expectedLastExecutionTime) < 1000 - || Math.abs(lastExecutionTime.toEpochMilli() - expectedCurrentExecutionTime) < 1000; + || Math.abs(lastExecutionTime.toEpochMilli() - expectedCurrentExecutionTime) < 1000; } @Override @@ -155,24 +160,24 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws private XContentBuilder toXContentNoDelay(XContentBuilder builder) throws IOException { builder.startObject() - .startObject(INTERVAL_FIELD) - .field(START_TIME_FIELD, this.initialStartTime.toEpochMilli()) - .field(PERIOD_FIELD, this.interval) - .field(UNIT_FIELD, this.unit) - .endObject() - .endObject(); + .startObject(INTERVAL_FIELD) + .field(START_TIME_FIELD, this.initialStartTime.toEpochMilli()) + .field(PERIOD_FIELD, this.interval) + .field(UNIT_FIELD, this.unit) + .endObject() + .endObject(); return builder; } private XContentBuilder toXContentWithDelay(XContentBuilder builder) throws IOException { builder.startObject() - .startObject(INTERVAL_FIELD) - .field(START_TIME_FIELD, this.initialStartTime.toEpochMilli()) - .field(PERIOD_FIELD, this.interval) - .field(UNIT_FIELD, this.unit) - .field(DELAY_FIELD, this.scheduleDelay) - .endObject() - .endObject(); + .startObject(INTERVAL_FIELD) + .field(START_TIME_FIELD, this.initialStartTime.toEpochMilli()) + .field(PERIOD_FIELD, this.interval) + .field(UNIT_FIELD, this.unit) + .field(DELAY_FIELD, this.scheduleDelay) + .endObject() + .endObject(); return builder; } @@ -191,17 +196,18 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; IntervalSchedule intervalSchedule = (IntervalSchedule) o; - return initialStartTime.equals(intervalSchedule.initialStartTime) && - interval == intervalSchedule.interval && - unit == intervalSchedule.unit && - intervalInMillis == intervalSchedule.intervalInMillis && - Objects.equals(scheduleDelay, intervalSchedule.scheduleDelay); + return initialStartTime.equals(intervalSchedule.initialStartTime) + && interval == intervalSchedule.interval + && unit == intervalSchedule.unit + && intervalInMillis == intervalSchedule.intervalInMillis + && Objects.equals(scheduleDelay, intervalSchedule.scheduleDelay); } @Override public int hashCode() { - return scheduleDelay == null ? 
Objects.hash(initialStartTime, interval, unit, intervalInMillis) : - Objects.hash(initialStartTime, interval, unit, intervalInMillis, scheduleDelay); + return scheduleDelay == null + ? Objects.hash(initialStartTime, interval, unit, intervalInMillis) + : Objects.hash(initialStartTime, interval, unit, intervalInMillis, scheduleDelay); } @Override diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/Schedule.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/Schedule.java index 3ff11d9f..84d1429e 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/Schedule.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/Schedule.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.spi.schedule; import org.opensearch.common.collect.Tuple; @@ -53,4 +56,4 @@ public interface Schedule extends Writeable, ToXContentObject { */ Long getDelay(); -} \ No newline at end of file +} diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParser.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParser.java index 503238e3..c1bfdae6 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParser.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParser.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.spi.schedule; import org.opensearch.common.xcontent.XContentParser; @@ -21,7 +24,7 @@ public class ScheduleParser { public static Schedule parse(XContentParser parser) throws IOException { XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - while(!XContentParser.Token.END_OBJECT.equals(parser.nextToken())) { + while (!XContentParser.Token.END_OBJECT.equals(parser.nextToken())) { String fieldName = parser.currentName(); parser.nextToken(); switch (fieldName) { @@ -33,19 +36,20 @@ public static Schedule parse(XContentParser parser) throws IOException { String cronField = parser.currentName(); parser.nextToken(); switch (cronField) { - case CronSchedule.EXPRESSION_FIELD: expression = parser.text(); + case CronSchedule.EXPRESSION_FIELD: + expression = parser.text(); break; - case CronSchedule.TIMEZONE_FIELD: timezone = ZoneId.of(parser.text()); + case CronSchedule.TIMEZONE_FIELD: + timezone = ZoneId.of(parser.text()); break; - case Schedule.DELAY_FIELD: cronDelay = parser.longValue(); + case Schedule.DELAY_FIELD: + cronDelay = parser.longValue(); break; default: - throw new IllegalArgumentException( - String.format(Locale.ROOT, "Unknown cron field %s", cronField)); + throw new IllegalArgumentException(String.format(Locale.ROOT, "Unknown cron field %s", cronField)); } } - XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), - parser); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), parser); parser.nextToken(); return cronDelay == null ? 
new CronSchedule(expression, timezone) : new CronSchedule(expression, timezone, cronDelay); case IntervalSchedule.INTERVAL_FIELD: @@ -70,17 +74,16 @@ public static Schedule parse(XContentParser parser) throws IOException { intervalDelay = parser.longValue(); break; default: - throw new IllegalArgumentException( - String.format(Locale.ROOT, "Unknown interval field %s", intervalField)); + throw new IllegalArgumentException(String.format(Locale.ROOT, "Unknown interval field %s", intervalField)); } } - XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), - parser); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), parser); parser.nextToken(); - return intervalDelay == null ? new IntervalSchedule(startTime, period, unit) : new IntervalSchedule(startTime, period, unit, intervalDelay); + return intervalDelay == null + ? new IntervalSchedule(startTime, period, unit) + : new IntervalSchedule(startTime, period, unit, intervalDelay); default: - throw new IllegalArgumentException( - String.format(Locale.ROOT, "Unknown schedule type %s", fieldName)); + throw new IllegalArgumentException(String.format(Locale.ROOT, "Unknown schedule type %s", fieldName)); } } throw new IllegalArgumentException("Invalid schedule document object."); diff --git a/spi/src/main/java/org/opensearch/jobscheduler/spi/utils/LockService.java b/spi/src/main/java/org/opensearch/jobscheduler/spi/utils/LockService.java index 9b135bb4..a4660511 100644 --- a/spi/src/main/java/org/opensearch/jobscheduler/spi/utils/LockService.java +++ b/spi/src/main/java/org/opensearch/jobscheduler/spi/utils/LockService.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.spi.utils; import org.opensearch.jobscheduler.spi.JobExecutionContext; @@ -59,7 +62,7 @@ private String lockMapping() { InputStream in = LockService.class.getResourceAsStream("opensearch_job_scheduler_lock.json"); StringBuilder stringBuilder = new StringBuilder(); BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); - for (String line; (line = bufferedReader.readLine()) != null; ) { + for (String line; (line = bufferedReader.readLine()) != null;) { stringBuilder.append(line); } return stringBuilder.toString(); @@ -78,17 +81,16 @@ void createLockIndex(ActionListener listener) { listener.onResponse(true); } else { final CreateIndexRequest request = new CreateIndexRequest(LOCK_INDEX_NAME).mapping(lockMapping()); - client.admin().indices().create(request, ActionListener.wrap( - response -> listener.onResponse(response.isAcknowledged()), - exception -> { - if (exception instanceof ResourceAlreadyExistsException - || exception.getCause() instanceof ResourceAlreadyExistsException) { - listener.onResponse(true); - } else { - listener.onFailure(exception); - } - } - )); + client.admin() + .indices() + .create(request, ActionListener.wrap(response -> listener.onResponse(response.isAcknowledged()), exception -> { + if (exception instanceof ResourceAlreadyExistsException + || exception.getCause() instanceof ResourceAlreadyExistsException) { + listener.onResponse(true); + } else { + listener.onFailure(exception); + } + })); } } @@ -102,8 +104,11 @@ void createLockIndex(ActionListener listener) { * or else null. 
Passes {@code IllegalArgumentException} to onFailure if the {@code ScheduledJobParameter} does not * have {@code LockDurationSeconds}. */ - public void acquireLock(final ScheduledJobParameter jobParameter, - final JobExecutionContext context, ActionListener listener) { + public void acquireLock( + final ScheduledJobParameter jobParameter, + final JobExecutionContext context, + ActionListener listener + ) { final String jobIndexName = context.getJobIndexName(); final String jobId = context.getJobId(); final long lockDurationSeconds = jobParameter.getLockDurationSeconds(); @@ -123,10 +128,12 @@ public void acquireLock(final ScheduledJobParameter jobParameter, * or else null. Passes {@code IllegalArgumentException} to onFailure if the {@code ScheduledJobParameter} does not * have {@code LockDurationSeconds}. */ - public void acquireLockWithId(final String jobIndexName, - final Long lockDurationSeconds, - final String lockId, - ActionListener listener) { + public void acquireLockWithId( + final String jobIndexName, + final Long lockDurationSeconds, + final String lockId, + ActionListener listener + ) { if (lockDurationSeconds == null) { listener.onFailure(new IllegalArgumentException("Job LockDuration should not be null")); } else if (jobIndexName == null) { @@ -134,45 +141,37 @@ public void acquireLockWithId(final String jobIndexName, } else if (lockId == null) { listener.onFailure(new IllegalArgumentException("Lock ID should not be null")); } else { - createLockIndex(ActionListener.wrap( - created -> { - if (created) { - try { - findLock(LockModel.generateLockId(jobIndexName, lockId), ActionListener.wrap( - existingLock -> { - if (existingLock != null) { - if (isLockReleasedOrExpired(existingLock)) { - // Lock is expired. Attempt to acquire lock. - logger.debug("lock is released or expired: " + existingLock); - LockModel updateLock = new LockModel(existingLock, getNow(), - lockDurationSeconds, false); - updateLock(updateLock, listener); - } else { - logger.debug("Lock is NOT released or expired. " + existingLock); - // Lock is still not expired. Return null as we cannot acquire lock. - listener.onResponse(null); - } - } else { - // There is no lock object and it is first time. Create new lock. - // Note that the lockID will be set to {jobIndexName}-{lockId} - LockModel tempLock = new LockModel(jobIndexName, lockId, getNow(), - lockDurationSeconds, false); - logger.debug("Lock does not exist. Creating new lock" + tempLock); - createLock(tempLock, listener); - } - }, - listener::onFailure - )); - } catch (VersionConflictEngineException e) { - logger.debug("could not acquire lock {}", e.getMessage()); - listener.onResponse(null); + createLockIndex(ActionListener.wrap(created -> { + if (created) { + try { + findLock(LockModel.generateLockId(jobIndexName, lockId), ActionListener.wrap(existingLock -> { + if (existingLock != null) { + if (isLockReleasedOrExpired(existingLock)) { + // Lock is expired. Attempt to acquire lock. + logger.debug("lock is released or expired: " + existingLock); + LockModel updateLock = new LockModel(existingLock, getNow(), lockDurationSeconds, false); + updateLock(updateLock, listener); + } else { + logger.debug("Lock is NOT released or expired. " + existingLock); + // Lock is still not expired. Return null as we cannot acquire lock. + listener.onResponse(null); + } + } else { + // There is no lock object and it is first time. Create new lock. 
+ // Note that the lockID will be set to {jobIndexName}-{lockId} + LockModel tempLock = new LockModel(jobIndexName, lockId, getNow(), lockDurationSeconds, false); + logger.debug("Lock does not exist. Creating new lock" + tempLock); + createLock(tempLock, listener); } - } else { - listener.onResponse(null); - } - }, - listener::onFailure - )); + }, listener::onFailure)); + } catch (VersionConflictEngineException e) { + logger.debug("could not acquire lock {}", e.getMessage()); + listener.onResponse(null); + } + } else { + listener.onResponse(null); + } + }, listener::onFailure)); } } @@ -182,31 +181,34 @@ private boolean isLockReleasedOrExpired(final LockModel lock) { private void updateLock(final LockModel updateLock, ActionListener listener) { try { - UpdateRequest updateRequest = new UpdateRequest() - .index(LOCK_INDEX_NAME) + UpdateRequest updateRequest = new UpdateRequest().index(LOCK_INDEX_NAME) .id(updateLock.getLockId()) .setIfSeqNo(updateLock.getSeqNo()) .setIfPrimaryTerm(updateLock.getPrimaryTerm()) .doc(updateLock.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) .fetchSource(true); - client.update(updateRequest, ActionListener.wrap( - response -> listener.onResponse(new LockModel(updateLock, response.getSeqNo(), - response.getPrimaryTerm())), + client.update( + updateRequest, + ActionListener.wrap( + response -> listener.onResponse(new LockModel(updateLock, response.getSeqNo(), response.getPrimaryTerm())), exception -> { if (exception instanceof VersionConflictEngineException) { logger.debug("could not acquire lock {}", exception.getMessage()); } if (exception instanceof DocumentMissingException) { - logger.debug("Document is deleted. This happens if the job is already removed and" + - " this is the last run." + - "{}", exception.getMessage()); + logger.debug( + "Document is deleted. This happens if the job is already removed and" + " this is the last run." + "{}", + exception.getMessage() + ); } if (exception instanceof IOException) { logger.error("IOException occurred updating lock.", exception); } listener.onResponse(null); - })); + } + ) + ); } catch (IOException e) { logger.error("IOException occurred updating lock.", e); listener.onResponse(null); @@ -215,25 +217,26 @@ private void updateLock(final LockModel updateLock, ActionListener li private void createLock(final LockModel tempLock, ActionListener listener) { try { - final IndexRequest request = new IndexRequest(LOCK_INDEX_NAME) - .id(tempLock.getLockId()) + final IndexRequest request = new IndexRequest(LOCK_INDEX_NAME).id(tempLock.getLockId()) .source(tempLock.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) .setIfSeqNo(SequenceNumbers.UNASSIGNED_SEQ_NO) .setIfPrimaryTerm(SequenceNumbers.UNASSIGNED_PRIMARY_TERM) .create(true); - client.index(request, ActionListener.wrap( - response -> listener.onResponse(new LockModel(tempLock, response.getSeqNo(), - response.getPrimaryTerm())), + client.index( + request, + ActionListener.wrap( + response -> listener.onResponse(new LockModel(tempLock, response.getSeqNo(), response.getPrimaryTerm())), exception -> { - if (exception instanceof VersionConflictEngineException) { - logger.debug("Lock is already created. {}", exception.getMessage()); - } - if (exception instanceof IOException) { - logger.error("IOException occurred creating lock", exception); - } - listener.onResponse(null); + if (exception instanceof VersionConflictEngineException) { + logger.debug("Lock is already created. 
{}", exception.getMessage()); + } + if (exception instanceof IOException) { + logger.error("IOException occurred creating lock", exception); + } + listener.onResponse(null); } - )); + ) + ); } catch (IOException e) { logger.error("IOException occurred creating lock", e); listener.onResponse(null); @@ -242,28 +245,24 @@ private void createLock(final LockModel tempLock, ActionListener list private void findLock(final String lockId, ActionListener listener) { GetRequest getRequest = new GetRequest(LOCK_INDEX_NAME).id(lockId); - client.get(getRequest, ActionListener.wrap( - response -> { - if (!response.isExists()) { - listener.onResponse(null); - } else { - try { - XContentParser parser = XContentType.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - response.getSourceAsString()); - parser.nextToken(); - listener.onResponse(LockModel.parse(parser, response.getSeqNo(), response.getPrimaryTerm())); - } catch (IOException e) { - logger.error("IOException occurred finding lock", e); - listener.onResponse(null); - } - } - }, - exception -> { - logger.error("Exception occurred finding lock", exception); - listener.onFailure(exception); + client.get(getRequest, ActionListener.wrap(response -> { + if (!response.isExists()) { + listener.onResponse(null); + } else { + try { + XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, response.getSourceAsString()); + parser.nextToken(); + listener.onResponse(LockModel.parse(parser, response.getSeqNo(), response.getPrimaryTerm())); + } catch (IOException e) { + logger.error("IOException occurred finding lock", e); + listener.onResponse(null); } - )); + } + }, exception -> { + logger.error("Exception occurred finding lock", exception); + listener.onFailure(exception); + })); } /** @@ -282,10 +281,7 @@ public void release(final LockModel lock, ActionListener listener) { } else { logger.debug("Releasing lock: " + lock); final LockModel lockToRelease = new LockModel(lock, true); - updateLock(lockToRelease, ActionListener.wrap( - releasedLock -> listener.onResponse(releasedLock != null), - listener::onFailure - )); + updateLock(lockToRelease, ActionListener.wrap(releasedLock -> listener.onResponse(releasedLock != null), listener::onFailure)); } } @@ -299,20 +295,18 @@ public void release(final LockModel lock, ActionListener listener) { */ public void deleteLock(final String lockId, ActionListener listener) { DeleteRequest deleteRequest = new DeleteRequest(LOCK_INDEX_NAME).id(lockId); - client.delete(deleteRequest, ActionListener.wrap( - response -> { - listener.onResponse(response.getResult() == DocWriteResponse.Result.DELETED || - response.getResult() == DocWriteResponse.Result.NOT_FOUND); - }, - exception -> { - if (exception instanceof IndexNotFoundException - || exception.getCause() instanceof IndexNotFoundException) { - logger.debug("Index is not found to delete lock. {}", exception.getMessage()); - listener.onResponse(true); - } else { - listener.onFailure(exception); - } - })); + client.delete(deleteRequest, ActionListener.wrap(response -> { + listener.onResponse( + response.getResult() == DocWriteResponse.Result.DELETED || response.getResult() == DocWriteResponse.Result.NOT_FOUND + ); + }, exception -> { + if (exception instanceof IndexNotFoundException || exception.getCause() instanceof IndexNotFoundException) { + logger.debug("Index is not found to delete lock. 
{}", exception.getMessage()); + listener.onResponse(true); + } else { + listener.onFailure(exception); + } + })); } /** @@ -330,20 +324,25 @@ public void renewLock(final LockModel lock, ActionListener listener) logger.debug("Lock is null. Nothing to renew."); listener.onResponse(null); } else { - logger.debug("Renewing lock: {}. The lock was acquired or renewed on: {}, and the duration was {} sec.", - lock, lock.getLockTime(), lock.getLockDurationSeconds()); + logger.debug( + "Renewing lock: {}. The lock was acquired or renewed on: {}, and the duration was {} sec.", + lock, + lock.getLockTime(), + lock.getLockDurationSeconds() + ); final LockModel lockToRenew = new LockModel(lock, getNow(), lock.getLockDurationSeconds(), false); - updateLock(lockToRenew, ActionListener.wrap( - renewedLock -> { - logger.debug("Renewed lock: {}. It is supposed to be valid for another {} sec from {}.", - renewedLock, renewedLock.getLockDurationSeconds(), renewedLock.getLockTime()); - listener.onResponse(renewedLock); - }, - exception -> { - logger.debug("Failed to renew lock: {}.", lock); - listener.onFailure(exception); - } - )); + updateLock(lockToRenew, ActionListener.wrap(renewedLock -> { + logger.debug( + "Renewed lock: {}. It is supposed to be valid for another {} sec from {}.", + renewedLock, + renewedLock.getLockDurationSeconds(), + renewedLock.getLockTime() + ); + listener.onResponse(renewedLock); + }, exception -> { + logger.debug("Failed to renew lock: {}.", lock); + listener.onFailure(exception); + })); } } diff --git a/spi/src/main/resources/org/opensearch/jobscheduler/spi/utils/opensearch_job_scheduler_lock.json b/spi/src/main/resources/org/opensearch/jobscheduler/spi/utils/opensearch_job_scheduler_lock.json index c23217c2..059641ef 100644 --- a/spi/src/main/resources/org/opensearch/jobscheduler/spi/utils/opensearch_job_scheduler_lock.json +++ b/spi/src/main/resources/org/opensearch/jobscheduler/spi/utils/opensearch_job_scheduler_lock.json @@ -18,4 +18,4 @@ "type": "boolean" } } -} \ No newline at end of file +} diff --git a/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/CronScheduleTests.java b/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/CronScheduleTests.java index 9d0e0f1c..2c76666c 100644 --- a/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/CronScheduleTests.java +++ b/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/CronScheduleTests.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
*/ - package org.opensearch.jobscheduler.spi.schedule; import com.cronutils.model.time.ExecutionTime; @@ -45,9 +48,11 @@ public void testDifferentClocks() { pdtClockCronSchedule.setClock(pdtClock); CronSchedule utcClockCronSchedule = new CronSchedule("* * * * *", ZoneId.of("UTC")); utcClockCronSchedule.setClock(utcClock); - assertEquals("Next execution time based on different clock should be same.", + assertEquals( + "Next execution time based on different clock should be same.", pdtClockCronSchedule.getNextExecutionTime(null), - utcClockCronSchedule.getNextExecutionTime(null)); + utcClockCronSchedule.getNextExecutionTime(null) + ); } public void testNextTimeToExecute() { @@ -67,10 +72,11 @@ public void testNextTimeToExecute() { Assert.assertEquals(expected, duration); Assert.assertEquals(expectedDelay, durationDelay); - Assert.assertEquals(this.cronSchedule.nextTimeToExecute(), - Duration.between(now, this.cronSchedule.getNextExecutionTime(now))); - Assert.assertEquals(this.cronScheduleDelay.nextTimeToExecute(), - Duration.between(now, this.cronScheduleDelay.getNextExecutionTime(now))); + Assert.assertEquals(this.cronSchedule.nextTimeToExecute(), Duration.between(now, this.cronSchedule.getNextExecutionTime(now))); + Assert.assertEquals( + this.cronScheduleDelay.nextTimeToExecute(), + Duration.between(now, this.cronScheduleDelay.getNextExecutionTime(now)) + ); } public void testGetPeriodStartingAt() { @@ -199,10 +205,9 @@ public void testRunningOnTime_noLastExecution() { this.cronSchedule.setExecutionTime(mockExecutionTime); this.cronScheduleDelay.setExecutionTime(mockExecutionTime); - Mockito.when(mockExecutionTime.lastExecution(ZonedDateTime.ofInstant(now, ZoneId.systemDefault()))) - .thenReturn(Optional.empty()); + Mockito.when(mockExecutionTime.lastExecution(ZonedDateTime.ofInstant(now, ZoneId.systemDefault()))).thenReturn(Optional.empty()); Mockito.when(mockExecutionTime.lastExecution(ZonedDateTime.ofInstant(now.minusMillis(DELAY), ZoneId.systemDefault()))) - .thenReturn(Optional.empty()); + .thenReturn(Optional.empty()); Assert.assertFalse(this.cronSchedule.runningOnTime(now)); Assert.assertFalse(this.cronScheduleDelay.runningOnTime(now)); @@ -211,13 +216,11 @@ public void testRunningOnTime_noLastExecution() { public void testToXContent() throws IOException { CronSchedule schedule = new CronSchedule("* * * * *", ZoneId.of("PST8PDT")); String expectedJsonStr = "{\"cron\":{\"expression\":\"* * * * *\",\"timezone\":\"PST8PDT\"}}"; - Assert.assertEquals(expectedJsonStr, - XContentHelper.toXContent(schedule, XContentType.JSON, false).utf8ToString()); + Assert.assertEquals(expectedJsonStr, XContentHelper.toXContent(schedule, XContentType.JSON, false).utf8ToString()); CronSchedule scheduleDelay = new CronSchedule("* * * * *", ZoneId.of("PST8PDT"), 1234); String expectedJsonStrDelay = "{\"cron\":{\"expression\":\"* * * * *\",\"timezone\":\"PST8PDT\",\"schedule_delay\":1234}}"; - Assert.assertEquals(expectedJsonStrDelay, - XContentHelper.toXContent(scheduleDelay, XContentType.JSON, false).utf8ToString()); + Assert.assertEquals(expectedJsonStrDelay, XContentHelper.toXContent(scheduleDelay, XContentType.JSON, false).utf8ToString()); } public void testCronScheduleEqualsAndHashCode() { @@ -231,7 +234,11 @@ public void testCronScheduleEqualsAndHashCode() { Assert.assertNotEquals("Different cron schedules were called equal", cronScheduleOne, cronScheduleThree); Assert.assertEquals("Identical cron schedules had different hash codes", cronScheduleOne.hashCode(), cronScheduleTwo.hashCode()); 
Assert.assertNotEquals("Different cron schedules were called equal", cronScheduleThree, cronScheduleFour); - Assert.assertNotEquals("Different cron schedules had the same hash code", cronScheduleThree.hashCode(), cronScheduleFour.hashCode()); + Assert.assertNotEquals( + "Different cron schedules had the same hash code", + cronScheduleThree.hashCode(), + cronScheduleFour.hashCode() + ); Assert.assertEquals("Identical cron schedules were not equal", cronScheduleFour, cronScheduleFive); Assert.assertEquals("Identical cron schedules had different hash codes", cronScheduleFour.hashCode(), cronScheduleFive.hashCode()); } diff --git a/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/IntervalScheduleTests.java b/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/IntervalScheduleTests.java index 1217c17f..cc823e9d 100644 --- a/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/IntervalScheduleTests.java +++ b/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/IntervalScheduleTests.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.spi.schedule; import org.opensearch.common.collect.Tuple; @@ -39,7 +42,7 @@ public void setup() throws ParseException { this.intervalScheduleDelay = new IntervalSchedule(startTime, 3, ChronoUnit.MINUTES, DELAY); } - @Test (expected = IllegalArgumentException.class) + @Test(expected = IllegalArgumentException.class) public void testConstructor_notSupportedTimeUnit() throws ParseException { Instant startTime = new SimpleDateFormat("MM/dd/yyyy").parse("01/01/2019").toInstant(); new IntervalSchedule(startTime, 1, ChronoUnit.MILLIS); @@ -54,16 +57,21 @@ public void testNextTimeToExecution() { Instant nextMinute = Instant.ofEpochSecond(now.getEpochSecond() / 60 * 60 + 60); Duration expected = Duration.of(nextMinute.toEpochMilli() - now.toEpochMilli(), ChronoUnit.MILLIS); - Instant nextIntervalPlusDelay = Instant.ofEpochSecond(((now.minusMillis(DELAY).getEpochSecond()) / 180 * 180) + 180).plusMillis(DELAY); + Instant nextIntervalPlusDelay = Instant.ofEpochSecond(((now.minusMillis(DELAY).getEpochSecond()) / 180 * 180) + 180) + .plusMillis(DELAY); Duration expectedDelay = Duration.of(nextIntervalPlusDelay.toEpochMilli() - now.toEpochMilli(), ChronoUnit.MILLIS); Assert.assertEquals(expected, this.intervalSchedule.nextTimeToExecute()); Assert.assertEquals(expectedDelay, this.intervalScheduleDelay.nextTimeToExecute()); - Assert.assertEquals(this.intervalSchedule.nextTimeToExecute(), - Duration.between(now, this.intervalSchedule.getNextExecutionTime(now))); - Assert.assertEquals(this.intervalScheduleDelay.nextTimeToExecute(), - Duration.between(now, this.intervalScheduleDelay.getNextExecutionTime(now))); + Assert.assertEquals( + this.intervalSchedule.nextTimeToExecute(), + Duration.between(now, this.intervalSchedule.getNextExecutionTime(now)) + ); + Assert.assertEquals( + this.intervalScheduleDelay.nextTimeToExecute(), + Duration.between(now, this.intervalScheduleDelay.getNextExecutionTime(now)) + ); } public void testGetPeriodStartingAt() { @@ -90,7 +98,7 @@ public void testGetPeriodStartingAt_nullParam() { public void testRunningOnTime() { Instant now = Instant.now(); - if(now.toEpochMilli() % (60 * 1000) == 0) { + if (now.toEpochMilli() % (60 * 1000) == 0) { // test "now" is not execution time case now 
= now.plus(10, ChronoUnit.SECONDS); } @@ -130,7 +138,7 @@ public void testRunningOnTime() { public void testRunningOnTimeWithDelay() { Instant now = Instant.now(); - if(now.minusMillis(DELAY).toEpochMilli() % (180 * 1000) == 0) { + if (now.minusMillis(DELAY).toEpochMilli() % (180 * 1000) == 0) { // test "now" is not execution time case now = now.plus(10, ChronoUnit.SECONDS); } @@ -176,16 +184,17 @@ public void testRunningOnTime_nullLastExetime() { public void testToXContent() throws IOException { long epochMillis = this.startTime.toEpochMilli(); String xContentJsonStr = "{\"interval\":{\"start_time\":" + epochMillis + ",\"period\":1,\"unit\":\"Minutes\"}}"; - XContentHelper.toXContent(this.intervalSchedule, XContentType.JSON, false) - .utf8ToString(); - Assert.assertEquals(xContentJsonStr, XContentHelper.toXContent(this.intervalSchedule, XContentType.JSON, false) - .utf8ToString()); - - String xContentJsonStrDelay = "{\"interval\":{\"start_time\":" + epochMillis + ",\"period\":3,\"unit\":\"Minutes\",\"schedule_delay\":15000}}"; - XContentHelper.toXContent(this.intervalScheduleDelay, XContentType.JSON, false) - .utf8ToString(); - Assert.assertEquals(xContentJsonStrDelay, XContentHelper.toXContent(this.intervalScheduleDelay, XContentType.JSON, false) - .utf8ToString()); + XContentHelper.toXContent(this.intervalSchedule, XContentType.JSON, false).utf8ToString(); + Assert.assertEquals(xContentJsonStr, XContentHelper.toXContent(this.intervalSchedule, XContentType.JSON, false).utf8ToString()); + + String xContentJsonStrDelay = "{\"interval\":{\"start_time\":" + + epochMillis + + ",\"period\":3,\"unit\":\"Minutes\",\"schedule_delay\":15000}}"; + XContentHelper.toXContent(this.intervalScheduleDelay, XContentType.JSON, false).utf8ToString(); + Assert.assertEquals( + xContentJsonStrDelay, + XContentHelper.toXContent(this.intervalScheduleDelay, XContentType.JSON, false).utf8ToString() + ); } public void testIntervalScheduleEqualsAndHashCode() { @@ -198,10 +207,18 @@ public void testIntervalScheduleEqualsAndHashCode() { Assert.assertEquals("Identical interval schedules were not equal", intervalScheduleOne, intervalScheduleTwo); Assert.assertNotEquals("Different interval schedules were called equal", intervalScheduleOne, intervalScheduleThree); - Assert.assertEquals("Identical interval schedules had different hash codes", intervalScheduleOne.hashCode(), intervalScheduleTwo.hashCode()); + Assert.assertEquals( + "Identical interval schedules had different hash codes", + intervalScheduleOne.hashCode(), + intervalScheduleTwo.hashCode() + ); Assert.assertNotEquals("Different interval schedules were called equal", intervalScheduleOne, intervalScheduleFour); Assert.assertEquals("Identical interval schedules were not equal", intervalScheduleFour, intervalScheduleFive); - Assert.assertEquals("Identical interval schedules had different hash codes", intervalScheduleFour.hashCode(), intervalScheduleFive.hashCode()); + Assert.assertEquals( + "Identical interval schedules had different hash codes", + intervalScheduleFour.hashCode(), + intervalScheduleFive.hashCode() + ); } public void testIntervalScheduleAsStream() throws Exception { diff --git a/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/JobDocVersionTests.java b/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/JobDocVersionTests.java index 9092145c..bda2cb8b 100644 --- a/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/JobDocVersionTests.java +++ 
b/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/JobDocVersionTests.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.spi.schedule; import org.opensearch.jobscheduler.spi.JobDocVersion; diff --git a/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParserTests.java b/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParserTests.java index 4f7f5164..8f87edbd 100644 --- a/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParserTests.java +++ b/spi/src/test/java/org/opensearch/jobscheduler/spi/schedule/ScheduleParserTests.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.spi.schedule; import org.opensearch.common.bytes.BytesArray; @@ -27,25 +30,25 @@ public void testParseCronSchedule() throws IOException { Schedule schedule = ScheduleParser.parse(parser); Assert.assertTrue(schedule instanceof CronSchedule); - Assert.assertEquals("* * * * *", ((CronSchedule)schedule).getCronExpression()); - Assert.assertEquals(ZoneId.of("PST8PDT"), ((CronSchedule)schedule).getTimeZone()); + Assert.assertEquals("* * * * *", ((CronSchedule) schedule).getCronExpression()); + Assert.assertEquals(ZoneId.of("PST8PDT"), ((CronSchedule) schedule).getTimeZone()); } public void testParseIntervalSchedule() throws IOException { - String intervalScheduleJsonStr = "{\"interval\":{\"start_time\":1546329600000,\"period\":1,\"unit\":\"Minutes\"" + - ", \"schedule_delay\":1234}}"; + String intervalScheduleJsonStr = "{\"interval\":{\"start_time\":1546329600000,\"period\":1,\"unit\":\"Minutes\"" + + ", \"schedule_delay\":1234}}"; XContentParser parser = this.createParser(XContentType.JSON.xContent(), new BytesArray(intervalScheduleJsonStr)); parser.nextToken(); Schedule schedule = ScheduleParser.parse(parser); Assert.assertTrue(schedule instanceof IntervalSchedule); - Assert.assertEquals(Instant.ofEpochMilli(1546329600000L).plusMillis(1234), ((IntervalSchedule)schedule).getStartTime()); - Assert.assertEquals(1, ((IntervalSchedule)schedule).getInterval()); - Assert.assertEquals(ChronoUnit.MINUTES, ((IntervalSchedule)schedule).getUnit()); + Assert.assertEquals(Instant.ofEpochMilli(1546329600000L).plusMillis(1234), ((IntervalSchedule) schedule).getStartTime()); + Assert.assertEquals(1, ((IntervalSchedule) schedule).getInterval()); + Assert.assertEquals(ChronoUnit.MINUTES, ((IntervalSchedule) schedule).getUnit()); } - @Test (expected = IllegalArgumentException.class) + @Test(expected = IllegalArgumentException.class) public void testUnknownScheduleType() throws IOException { String scheduleJsonStr = "{\"unknown_type\":{\"field\":\"value\"}}"; @@ -54,7 +57,7 @@ public void testUnknownScheduleType() throws IOException { ScheduleParser.parse(parser); } - @Test (expected = IllegalArgumentException.class) + @Test(expected = IllegalArgumentException.class) public void test_unknownFieldInCronSchedule() throws IOException { String cronScheduleJsonStr = "{\"cron\":{\"expression\":\"* * * * *\",\"unknown_field\":\"value\"}}"; @@ -63,7 +66,7 @@ public void 
test_unknownFieldInCronSchedule() throws IOException { ScheduleParser.parse(parser); } - @Test (expected = IllegalArgumentException.class) + @Test(expected = IllegalArgumentException.class) public void test_unknownFiledInIntervalSchedule() throws IOException { String intervalScheduleJsonStr = "{\"interval\":{\"start_time\":1546329600000,\"period\":1,\"unknown_filed\":\"value\"}}"; diff --git a/spi/src/test/java/org/opensearch/jobscheduler/spi/utils/LockServiceIT.java b/spi/src/test/java/org/opensearch/jobscheduler/spi/utils/LockServiceIT.java index 20c0c293..6d030fc1 100644 --- a/spi/src/test/java/org/opensearch/jobscheduler/spi/utils/LockServiceIT.java +++ b/spi/src/test/java/org/opensearch/jobscheduler/spi/utils/LockServiceIT.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.spi.utils; import org.junit.Before; @@ -36,33 +39,40 @@ public class LockServiceIT extends OpenSearchIntegTestCase { static final String JOB_ID = "test_job_id"; static final String JOB_INDEX_NAME = "test_job_index_name"; static final long LOCK_DURATION_SECONDS = 60; - static final ScheduledJobParameter TEST_SCHEDULED_JOB_PARAM = new ScheduledJobParameter() { + static final ScheduledJobParameter TEST_SCHEDULED_JOB_PARAM = new ScheduledJobParameter() { - @Override public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { return builder; } - @Override public String getName() { + @Override + public String getName() { return null; } - @Override public Instant getLastUpdateTime() { + @Override + public Instant getLastUpdateTime() { return null; } - @Override public Instant getEnabledTime() { + @Override + public Instant getEnabledTime() { return null; } - @Override public Schedule getSchedule() { + @Override + public Schedule getSchedule() { return null; } - @Override public boolean isEnabled() { + @Override + public boolean isEnabled() { return false; } - @Override public Long getLockDurationSeconds() { + @Override + public Long getLockDurationSeconds() { return LOCK_DURATION_SECONDS; } }; @@ -75,45 +85,44 @@ public void setup() { // thus the OpenSearchIntegTestCase.clusterService() will throw exception. 
this.clusterService = Mockito.mock(ClusterService.class, Mockito.RETURNS_DEEP_STUBS); Mockito.when(this.clusterService.state().routingTable().hasIndex(".opendistro-job-scheduler-lock")) - .thenReturn(false) - .thenReturn(true); + .thenReturn(false) + .thenReturn(true); } public void testSanity() throws Exception { String uniqSuffix = "_sanity"; CountDownLatch latch = new CountDownLatch(1); LockService lockService = new LockService(client(), this.clusterService); - final JobExecutionContext context = new JobExecutionContext(Instant.now(), new JobDocVersion(0, 0, 0), - lockService, JOB_INDEX_NAME + uniqSuffix, JOB_ID + uniqSuffix); + final JobExecutionContext context = new JobExecutionContext( + Instant.now(), + new JobDocVersion(0, 0, 0), + lockService, + JOB_INDEX_NAME + uniqSuffix, + JOB_ID + uniqSuffix + ); Instant testTime = Instant.now(); lockService.setTime(testTime); - lockService.acquireLock(TEST_SCHEDULED_JOB_PARAM, context, ActionListener.wrap( - lock -> { - assertNotNull("Expected to successfully grab lock.", lock); - assertEquals("job_id does not match.", JOB_ID + uniqSuffix, lock.getJobId()); - assertEquals("job_index_name does not match.", JOB_INDEX_NAME + uniqSuffix, lock.getJobIndexName()); - assertEquals("lock_id does not match.", LockModel.generateLockId(JOB_INDEX_NAME + uniqSuffix, - JOB_ID + uniqSuffix), lock.getLockId()); - assertEquals("lock_duration_seconds does not match.", LOCK_DURATION_SECONDS, lock.getLockDurationSeconds()); - assertEquals("lock_time does not match.", testTime.getEpochSecond(), lock.getLockTime().getEpochSecond()); - assertFalse("Lock should not be released.", lock.isReleased()); - assertFalse("Lock should not expire.", lock.isExpired()); - lockService.release(lock, ActionListener.wrap( - released -> { - assertTrue("Failed to release lock.", released); - lockService.deleteLock(lock.getLockId(), ActionListener.wrap( - deleted -> { - assertTrue("Failed to delete lock.", deleted); - latch.countDown(); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); + lockService.acquireLock(TEST_SCHEDULED_JOB_PARAM, context, ActionListener.wrap(lock -> { + assertNotNull("Expected to successfully grab lock.", lock); + assertEquals("job_id does not match.", JOB_ID + uniqSuffix, lock.getJobId()); + assertEquals("job_index_name does not match.", JOB_INDEX_NAME + uniqSuffix, lock.getJobIndexName()); + assertEquals( + "lock_id does not match.", + LockModel.generateLockId(JOB_INDEX_NAME + uniqSuffix, JOB_ID + uniqSuffix), + lock.getLockId() + ); + assertEquals("lock_duration_seconds does not match.", LOCK_DURATION_SECONDS, lock.getLockDurationSeconds()); + assertEquals("lock_time does not match.", testTime.getEpochSecond(), lock.getLockTime().getEpochSecond()); + assertFalse("Lock should not be released.", lock.isReleased()); + assertFalse("Lock should not expire.", lock.isExpired()); + lockService.release(lock, ActionListener.wrap(released -> { + assertTrue("Failed to release lock.", released); + lockService.deleteLock(lock.getLockId(), ActionListener.wrap(deleted -> { + assertTrue("Failed to delete lock.", deleted); + latch.countDown(); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); latch.await(5L, TimeUnit.SECONDS); } @@ -122,36 +131,32 @@ public void testSanityWithCustomLockID() throws Exception { String uniqSuffix = "_sanity"; CountDownLatch latch = new 
CountDownLatch(1); LockService lockService = new LockService(client(), this.clusterService); - final JobExecutionContext context = new JobExecutionContext(Instant.now(), new JobDocVersion(0, 0, 0), - lockService, JOB_INDEX_NAME + uniqSuffix, JOB_ID + uniqSuffix); + final JobExecutionContext context = new JobExecutionContext( + Instant.now(), + new JobDocVersion(0, 0, 0), + lockService, + JOB_INDEX_NAME + uniqSuffix, + JOB_ID + uniqSuffix + ); Instant testTime = Instant.now(); lockService.setTime(testTime); - lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap( - lock -> { - assertNotNull("Expected to successfully grab lock.", lock); - assertEquals("job_id does not match.", lockID, lock.getJobId()); - assertEquals("job_index_name does not match.", JOB_INDEX_NAME + uniqSuffix, lock.getJobIndexName()); - assertEquals("lock_id does not match.", lock.getJobIndexName() + "-" + lockID, lock.getLockId()); - assertEquals("lock_duration_seconds does not match.", LOCK_DURATION_SECONDS, lock.getLockDurationSeconds()); - assertEquals("lock_time does not match.", testTime.getEpochSecond(), lock.getLockTime().getEpochSecond()); - assertFalse("Lock should not be released.", lock.isReleased()); - assertFalse("Lock should not expire.", lock.isExpired()); - lockService.release(lock, ActionListener.wrap( - released -> { - assertTrue("Failed to release lock.", released); - lockService.deleteLock(lock.getLockId(), ActionListener.wrap( - deleted -> { - assertTrue("Failed to delete lock.", deleted); - latch.countDown(); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); + lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap(lock -> { + assertNotNull("Expected to successfully grab lock.", lock); + assertEquals("job_id does not match.", lockID, lock.getJobId()); + assertEquals("job_index_name does not match.", JOB_INDEX_NAME + uniqSuffix, lock.getJobIndexName()); + assertEquals("lock_id does not match.", lock.getJobIndexName() + "-" + lockID, lock.getLockId()); + assertEquals("lock_duration_seconds does not match.", LOCK_DURATION_SECONDS, lock.getLockDurationSeconds()); + assertEquals("lock_time does not match.", testTime.getEpochSecond(), lock.getLockTime().getEpochSecond()); + assertFalse("Lock should not be released.", lock.isReleased()); + assertFalse("Lock should not expire.", lock.isExpired()); + lockService.release(lock, ActionListener.wrap(released -> { + assertTrue("Failed to release lock.", released); + lockService.deleteLock(lock.getLockId(), ActionListener.wrap(deleted -> { + assertTrue("Failed to delete lock.", deleted); + latch.countDown(); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); latch.await(5L, TimeUnit.SECONDS); } @@ -160,34 +165,27 @@ public void testSecondAcquireLockFail() throws Exception { String lockID = randomAlphaOfLengthBetween(6, 15); CountDownLatch latch = new CountDownLatch(1); LockService lockService = new LockService(client(), this.clusterService); - final JobExecutionContext context = new JobExecutionContext(Instant.now(), new JobDocVersion(0, 0, 0), - lockService, JOB_INDEX_NAME + uniqSuffix, JOB_ID + uniqSuffix); - - lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap( - lock -> { - 
assertNotNull("Expected to successfully grab lock", lock); - lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap( - lock2 -> { - assertNull("Expected to failed to get lock.", lock2); - lockService.release(lock, ActionListener.wrap( - released -> { - assertTrue("Failed to release lock.", released); - lockService.deleteLock(lock.getLockId(), ActionListener.wrap( - deleted -> { - assertTrue("Failed to delete lock.", deleted); - latch.countDown(); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); + final JobExecutionContext context = new JobExecutionContext( + Instant.now(), + new JobDocVersion(0, 0, 0), + lockService, + JOB_INDEX_NAME + uniqSuffix, + JOB_ID + uniqSuffix + ); + + lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap(lock -> { + assertNotNull("Expected to successfully grab lock", lock); + lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap(lock2 -> { + assertNull("Expected to failed to get lock.", lock2); + lockService.release(lock, ActionListener.wrap(released -> { + assertTrue("Failed to release lock.", released); + lockService.deleteLock(lock.getLockId(), ActionListener.wrap(deleted -> { + assertTrue("Failed to delete lock.", deleted); + latch.countDown(); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); latch.await(10L, TimeUnit.SECONDS); } @@ -196,40 +194,30 @@ public void testLockReleasedAndAcquired() throws Exception { String lockID = randomAlphaOfLengthBetween(6, 15); CountDownLatch latch = new CountDownLatch(1); LockService lockService = new LockService(client(), this.clusterService); - final JobExecutionContext context = new JobExecutionContext(Instant.now(), new JobDocVersion(0, 0, 0), - lockService, JOB_INDEX_NAME + uniqSuffix, JOB_ID + uniqSuffix); - - lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap( - lock -> { - assertNotNull("Expected to successfully grab lock", lock); - lockService.release(lock, ActionListener.wrap( - released -> { - assertTrue("Failed to release lock.", released); - lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap( - lock2 -> { - assertNotNull("Expected to successfully grab lock2", lock2); - lockService.release(lock2, ActionListener.wrap( - released2 -> { - assertTrue("Failed to release lock2.", released2); - lockService.deleteLock(lock2.getLockId(), ActionListener.wrap( - deleted -> { - assertTrue("Failed to delete lock2.", deleted); - latch.countDown(); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); + final JobExecutionContext context = new JobExecutionContext( + Instant.now(), + new JobDocVersion(0, 0, 0), + lockService, + JOB_INDEX_NAME + uniqSuffix, + JOB_ID + uniqSuffix + ); + + lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap(lock -> { + assertNotNull("Expected 
to successfully grab lock", lock); + lockService.release(lock, ActionListener.wrap(released -> { + assertTrue("Failed to release lock.", released); + lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap(lock2 -> { + assertNotNull("Expected to successfully grab lock2", lock2); + lockService.release(lock2, ActionListener.wrap(released2 -> { + assertTrue("Failed to release lock2.", released2); + lockService.deleteLock(lock2.getLockId(), ActionListener.wrap(deleted -> { + assertTrue("Failed to delete lock2.", deleted); + latch.countDown(); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); latch.await(5L, TimeUnit.SECONDS); } @@ -240,78 +228,59 @@ public void testLockExpired() throws Exception { LockService lockService = new LockService(client(), this.clusterService); // Set lock time in the past. lockService.setTime(Instant.now().minus(Duration.ofSeconds(LOCK_DURATION_SECONDS + LOCK_DURATION_SECONDS))); - final JobExecutionContext context = new JobExecutionContext(Instant.now(), new JobDocVersion(0, 0, 0), - lockService, JOB_INDEX_NAME + uniqSuffix, JOB_ID + uniqSuffix); - - lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap( - lock -> { - assertNotNull("Expected to successfully grab lock", lock); - // Set lock back to current time to make the lock expire. - lockService.setTime(null); - lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap( - lock2 -> { - assertNotNull("Expected to successfully grab lock", lock2); - lockService.release(lock, ActionListener.wrap( - released -> { - assertFalse("Expected to fail releasing lock.", released); - lockService.release(lock2, ActionListener.wrap( - released2 -> { - assertTrue("Expecting to successfully release lock.", released2); - lockService.deleteLock(lock.getLockId(), ActionListener.wrap( - deleted -> { - assertTrue("Failed to delete lock.", deleted); - latch.countDown(); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); + final JobExecutionContext context = new JobExecutionContext( + Instant.now(), + new JobDocVersion(0, 0, 0), + lockService, + JOB_INDEX_NAME + uniqSuffix, + JOB_ID + uniqSuffix + ); + + lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap(lock -> { + assertNotNull("Expected to successfully grab lock", lock); + // Set lock back to current time to make the lock expire. 
+ lockService.setTime(null); + lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap(lock2 -> { + assertNotNull("Expected to successfully grab lock", lock2); + lockService.release(lock, ActionListener.wrap(released -> { + assertFalse("Expected to fail releasing lock.", released); + lockService.release(lock2, ActionListener.wrap(released2 -> { + assertTrue("Expecting to successfully release lock.", released2); + lockService.deleteLock(lock.getLockId(), ActionListener.wrap(deleted -> { + assertTrue("Failed to delete lock.", deleted); + latch.countDown(); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); latch.await(5L, TimeUnit.SECONDS); } public void testDeleteLockWithOutIndexCreation() throws Exception { CountDownLatch latch = new CountDownLatch(1); LockService lockService = new LockService(client(), this.clusterService); - lockService.deleteLock("NonExistingLockId", ActionListener.wrap( - deleted -> { - assertTrue("Failed to delete lock.", deleted); - latch.countDown(); - }, - exception -> fail(exception.getMessage()) - )); + lockService.deleteLock("NonExistingLockId", ActionListener.wrap(deleted -> { + assertTrue("Failed to delete lock.", deleted); + latch.countDown(); + }, exception -> fail(exception.getMessage()))); latch.await(5L, TimeUnit.SECONDS); } public void testDeleteNonExistingLock() throws Exception { CountDownLatch latch = new CountDownLatch(1); LockService lockService = new LockService(client(), this.clusterService); - lockService.createLockIndex(ActionListener.wrap( - created -> { - if (created) { - lockService.deleteLock("NonExistingLockId", ActionListener.wrap( - deleted -> { - assertTrue("Failed to delete lock.", deleted); - latch.countDown(); - }, - exception -> fail(exception.getMessage()) - )); - - } else { - fail("Failed to create lock index."); - } - }, - exception -> fail(exception.getMessage()) - )); + lockService.createLockIndex(ActionListener.wrap(created -> { + if (created) { + lockService.deleteLock("NonExistingLockId", ActionListener.wrap(deleted -> { + assertTrue("Failed to delete lock.", deleted); + latch.countDown(); + }, exception -> fail(exception.getMessage()))); + + } else { + fail("Failed to create lock index."); + } + }, exception -> fail(exception.getMessage()))); latch.await(5L, TimeUnit.SECONDS); } @@ -323,70 +292,58 @@ public void testMultiThreadCreateLock() throws Exception { String lockID = randomAlphaOfLengthBetween(6, 15); CountDownLatch latch = new CountDownLatch(1); final LockService lockService = new LockService(client(), this.clusterService); - final JobExecutionContext context = new JobExecutionContext(Instant.now(), new JobDocVersion(0, 0, 0), - lockService, JOB_INDEX_NAME + uniqSuffix, JOB_ID + uniqSuffix); - - lockService.createLockIndex(ActionListener.wrap( - created -> { - if (created) { - ExecutorService executor = Executors.newFixedThreadPool(3); - final AtomicReference lockModelAtomicReference = new AtomicReference<>(null); - Callable callable = () -> { - CountDownLatch callableLatch = new CountDownLatch(1); - lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap( - lock -> { - if (lock != null) { - lockModelAtomicReference.set(lock); - multiThreadCreateLockCounter.getAndAdd(1); - } - callableLatch.countDown(); - }, - exception 
-> fail(exception.getMessage()) - )); - callableLatch.await(5L, TimeUnit.SECONDS); - return true; - }; - - List> callables = Arrays.asList( - callable, - callable, - callable - ); - - executor.invokeAll(callables) - .forEach(future -> { - try { - future.get(); - } catch (Exception e) { - fail(e.getMessage()); - } - }); - executor.shutdown(); - executor.awaitTermination(10L, TimeUnit.SECONDS); - - assertEquals("There should be only one that grabs the lock.", 1, multiThreadCreateLockCounter.get()); - - final LockModel lock = lockModelAtomicReference.get(); - assertNotNull("Expected to successfully grab lock", lock); - lockService.release(lock, ActionListener.wrap( - released -> { - assertTrue("Failed to release lock.", released); - lockService.deleteLock(lock.getLockId(), ActionListener.wrap( - deleted -> { - assertTrue("Failed to delete lock.", deleted); - latch.countDown(); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - } else { - fail("Failed to create lock index."); + final JobExecutionContext context = new JobExecutionContext( + Instant.now(), + new JobDocVersion(0, 0, 0), + lockService, + JOB_INDEX_NAME + uniqSuffix, + JOB_ID + uniqSuffix + ); + + lockService.createLockIndex(ActionListener.wrap(created -> { + if (created) { + ExecutorService executor = Executors.newFixedThreadPool(3); + final AtomicReference lockModelAtomicReference = new AtomicReference<>(null); + Callable callable = () -> { + CountDownLatch callableLatch = new CountDownLatch(1); + lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap(lock -> { + if (lock != null) { + lockModelAtomicReference.set(lock); + multiThreadCreateLockCounter.getAndAdd(1); + } + callableLatch.countDown(); + }, exception -> fail(exception.getMessage()))); + callableLatch.await(5L, TimeUnit.SECONDS); + return true; + }; + + List> callables = Arrays.asList(callable, callable, callable); + + executor.invokeAll(callables).forEach(future -> { + try { + future.get(); + } catch (Exception e) { + fail(e.getMessage()); } - }, - exception -> fail(exception.getMessage()) - )); + }); + executor.shutdown(); + executor.awaitTermination(10L, TimeUnit.SECONDS); + + assertEquals("There should be only one that grabs the lock.", 1, multiThreadCreateLockCounter.get()); + + final LockModel lock = lockModelAtomicReference.get(); + assertNotNull("Expected to successfully grab lock", lock); + lockService.release(lock, ActionListener.wrap(released -> { + assertTrue("Failed to release lock.", released); + lockService.deleteLock(lock.getLockId(), ActionListener.wrap(deleted -> { + assertTrue("Failed to delete lock.", deleted); + latch.countDown(); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + } else { + fail("Failed to create lock index."); + } + }, exception -> fail(exception.getMessage()))); assertTrue("Test timed out - possibly leaked into other tests", latch.await(30L, TimeUnit.SECONDS)); } @@ -398,74 +355,65 @@ public void testMultiThreadAcquireLock() throws Exception { String lockID = randomAlphaOfLengthBetween(6, 15); CountDownLatch latch = new CountDownLatch(1); final LockService lockService = new LockService(client(), this.clusterService); - final JobExecutionContext context = new JobExecutionContext(Instant.now(), new JobDocVersion(0, 0, 0), - lockService, JOB_INDEX_NAME + uniqSuffix, JOB_ID + uniqSuffix); - - lockService.createLockIndex(ActionListener.wrap( - created -> { - if (created) { 
- // Set lock time in the past. - lockService.setTime(Instant.now().minus(Duration.ofSeconds(LOCK_DURATION_SECONDS + LOCK_DURATION_SECONDS))); - lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap( - createdLock -> { - assertNotNull(createdLock); - // Set lock back to current time to make the lock expire. - lockService.setTime(null); - - ExecutorService executor = Executors.newFixedThreadPool(3); - final AtomicReference lockModelAtomicReference = new AtomicReference<>(null); - Callable callable = () -> { - CountDownLatch callableLatch = new CountDownLatch(1); - lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap( - lock -> { - if (lock != null) { - lockModelAtomicReference.set(lock); - Integer test = multiThreadAcquireLockCounter.getAndAdd(1); - } - callableLatch.countDown(); - }, - exception -> fail(exception.getMessage()) - )); - callableLatch.await(5L, TimeUnit.SECONDS); - return true; - }; - - List> callables = Arrays.asList( - callable, - callable, - callable - ); - - executor.invokeAll(callables); - executor.shutdown(); - executor.awaitTermination(10L, TimeUnit.SECONDS); - - assertEquals("There should be only one that grabs the lock.", 1, multiThreadAcquireLockCounter.get()); - - final LockModel lock = lockModelAtomicReference.get(); - assertNotNull("Expected to successfully grab lock", lock); - lockService.release(lock, ActionListener.wrap( - released -> { - assertTrue("Failed to release lock.", released); - lockService.deleteLock(lock.getLockId(), ActionListener.wrap( - deleted -> { - assertTrue("Failed to delete lock.", deleted); - latch.countDown(); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - } else { - fail("Failed to create lock index."); - } - }, - exception -> fail(exception.getMessage()) - )); + final JobExecutionContext context = new JobExecutionContext( + Instant.now(), + new JobDocVersion(0, 0, 0), + lockService, + JOB_INDEX_NAME + uniqSuffix, + JOB_ID + uniqSuffix + ); + + lockService.createLockIndex(ActionListener.wrap(created -> { + if (created) { + // Set lock time in the past. + lockService.setTime(Instant.now().minus(Duration.ofSeconds(LOCK_DURATION_SECONDS + LOCK_DURATION_SECONDS))); + lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap(createdLock -> { + assertNotNull(createdLock); + // Set lock back to current time to make the lock expire. 
+ lockService.setTime(null); + + ExecutorService executor = Executors.newFixedThreadPool(3); + final AtomicReference lockModelAtomicReference = new AtomicReference<>(null); + Callable callable = () -> { + CountDownLatch callableLatch = new CountDownLatch(1); + lockService.acquireLockWithId( + context.getJobIndexName(), + LOCK_DURATION_SECONDS, + lockID, + ActionListener.wrap(lock -> { + if (lock != null) { + lockModelAtomicReference.set(lock); + Integer test = multiThreadAcquireLockCounter.getAndAdd(1); + } + callableLatch.countDown(); + }, exception -> fail(exception.getMessage())) + ); + callableLatch.await(5L, TimeUnit.SECONDS); + return true; + }; + + List> callables = Arrays.asList(callable, callable, callable); + + executor.invokeAll(callables); + executor.shutdown(); + executor.awaitTermination(10L, TimeUnit.SECONDS); + + assertEquals("There should be only one that grabs the lock.", 1, multiThreadAcquireLockCounter.get()); + + final LockModel lock = lockModelAtomicReference.get(); + assertNotNull("Expected to successfully grab lock", lock); + lockService.release(lock, ActionListener.wrap(released -> { + assertTrue("Failed to release lock.", released); + lockService.deleteLock(lock.getLockId(), ActionListener.wrap(deleted -> { + assertTrue("Failed to delete lock.", deleted); + latch.countDown(); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + } else { + fail("Failed to create lock index."); + } + }, exception -> fail(exception.getMessage()))); assertTrue("Test timed out - possibly leaked into other tests", latch.await(30L, TimeUnit.SECONDS)); } @@ -474,40 +422,36 @@ public void testRenewLock() throws Exception { String lockID = randomAlphaOfLengthBetween(6, 15); CountDownLatch latch = new CountDownLatch(1); LockService lockService = new LockService(client(), this.clusterService); - final JobExecutionContext context = new JobExecutionContext(Instant.now(), new JobDocVersion(0, 0, 0), - lockService, JOB_INDEX_NAME + uniqSuffix, JOB_ID + uniqSuffix); - - lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap( - lock -> { - assertNotNull("Expected to successfully grab lock", lock); - // Set the time of LockService (the 'lockTime' of acquired locks) to a fixed time. 
- Instant now = Instant.now(); - lockService.setTime(now); - lockService.renewLock(lock, ActionListener.wrap( - renewedLock -> { - assertNotNull("Expected to successfully renew lock", renewedLock); - assertEquals("lock_time is expected to be the renewal time.", now, renewedLock.getLockTime()); - assertEquals("lock_duration is expected to be unchanged.", - lock.getLockDurationSeconds(), renewedLock.getLockDurationSeconds()); - lockService.release(lock, ActionListener.wrap( - released -> { - assertTrue("Failed to release lock.", released); - lockService.deleteLock(lock.getLockId(), ActionListener.wrap( - deleted -> { - assertTrue("Failed to delete lock.", deleted); - latch.countDown(); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); - }, - exception -> fail(exception.getMessage()) - )); + final JobExecutionContext context = new JobExecutionContext( + Instant.now(), + new JobDocVersion(0, 0, 0), + lockService, + JOB_INDEX_NAME + uniqSuffix, + JOB_ID + uniqSuffix + ); + + lockService.acquireLockWithId(context.getJobIndexName(), LOCK_DURATION_SECONDS, lockID, ActionListener.wrap(lock -> { + assertNotNull("Expected to successfully grab lock", lock); + // Set the time of LockService (the 'lockTime' of acquired locks) to a fixed time. + Instant now = Instant.now(); + lockService.setTime(now); + lockService.renewLock(lock, ActionListener.wrap(renewedLock -> { + assertNotNull("Expected to successfully renew lock", renewedLock); + assertEquals("lock_time is expected to be the renewal time.", now, renewedLock.getLockTime()); + assertEquals( + "lock_duration is expected to be unchanged.", + lock.getLockDurationSeconds(), + renewedLock.getLockDurationSeconds() + ); + lockService.release(lock, ActionListener.wrap(released -> { + assertTrue("Failed to release lock.", released); + lockService.deleteLock(lock.getLockId(), ActionListener.wrap(deleted -> { + assertTrue("Failed to delete lock.", deleted); + latch.countDown(); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); + }, exception -> fail(exception.getMessage()))); latch.await(5L, TimeUnit.SECONDS); } } diff --git a/src/main/java/org/opensearch/jobscheduler/JobSchedulerPlugin.java b/src/main/java/org/opensearch/jobscheduler/JobSchedulerPlugin.java index 311b2f3a..126d08c0 100644 --- a/src/main/java/org/opensearch/jobscheduler/JobSchedulerPlugin.java +++ b/src/main/java/org/opensearch/jobscheduler/JobSchedulerPlugin.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
  */
-
 package org.opensearch.jobscheduler;
 
 import org.opensearch.jobscheduler.scheduler.JobScheduler;
@@ -46,7 +49,6 @@
 import java.util.Set;
 import java.util.function.Supplier;
 
-
 public class JobSchedulerPlugin extends Plugin implements ExtensiblePlugin {
 
     public static final String OPEN_DISTRO_JOB_SCHEDULER_THREAD_POOL_NAME = "open_distro_job_scheduler";
@@ -65,16 +67,30 @@ public JobSchedulerPlugin() {
     }
 
     @Override
-    public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
-                                               ResourceWatcherService resourceWatcherService, ScriptService scriptService,
-                                               NamedXContentRegistry xContentRegistry, Environment environment,
-                                               NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry,
-                                               IndexNameExpressionResolver indexNameExpressionResolver,
-                                               Supplier<RepositoriesService> repositoriesServiceSupplier) {
+    public Collection<Object> createComponents(
+        Client client,
+        ClusterService clusterService,
+        ThreadPool threadPool,
+        ResourceWatcherService resourceWatcherService,
+        ScriptService scriptService,
+        NamedXContentRegistry xContentRegistry,
+        Environment environment,
+        NodeEnvironment nodeEnvironment,
+        NamedWriteableRegistry namedWriteableRegistry,
+        IndexNameExpressionResolver indexNameExpressionResolver,
+        Supplier<RepositoriesService> repositoriesServiceSupplier
+    ) {
         this.lockService = new LockService(client, clusterService);
         this.scheduler = new JobScheduler(threadPool, this.lockService);
-        this.sweeper = initSweeper(environment.settings(), client, clusterService, threadPool, xContentRegistry,
-                this.scheduler, this.lockService);
+        this.sweeper = initSweeper(
+            environment.settings(),
+            client,
+            clusterService,
+            threadPool,
+            xContentRegistry,
+            this.scheduler,
+            this.lockService
+        );
         clusterService.addListener(this.sweeper);
         clusterService.addLifecycleListener(this.sweeper);
 
@@ -104,15 +120,22 @@ public List<ExecutorBuilder<?>> getExecutorBuilders(Settings settings) {
         final int processorCount = OpenSearchExecutors.allocatedProcessors(settings);
 
         List<ExecutorBuilder<?>> executorBuilders = new ArrayList<>();
-        executorBuilders.add(new FixedExecutorBuilder(settings, OPEN_DISTRO_JOB_SCHEDULER_THREAD_POOL_NAME,
-                processorCount, 200, "opendistro.jobscheduler.threadpool"));
+        executorBuilders.add(
+            new FixedExecutorBuilder(
+                settings,
+                OPEN_DISTRO_JOB_SCHEDULER_THREAD_POOL_NAME,
+                processorCount,
+                200,
+                "opendistro.jobscheduler.threadpool"
+            )
+        );
 
         return executorBuilders;
     }
 
     @Override
     public void onIndexModule(IndexModule indexModule) {
-        if(this.indicesToListen.contains(indexModule.getIndex().getName())) {
+        if (this.indicesToListen.contains(indexModule.getIndex().getName())) {
             indexModule.addIndexOperationListener(this.sweeper);
             log.info("JobSweeper started listening to operations on index {}", indexModule.getIndex().getName());
         }
@@ -126,7 +149,7 @@ public void loadExtensions(ExtensionLoader loader) {
             String jobIndexName = extension.getJobIndex();
             ScheduledJobParser jobParser = extension.getJobParser();
             ScheduledJobRunner runner = extension.getJobRunner();
-            if(this.indexToJobProviders.containsKey(jobIndexName)) {
+            if (this.indexToJobProviders.containsKey(jobIndexName)) {
                 continue;
             }
 
@@ -142,17 +165,24 @@ public List<NamedXContentRegistry.Entry> getNamedXContent() {
 
         // register schedule
         NamedXContentRegistry.Entry scheduleEntry = new NamedXContentRegistry.Entry(
-                Schedule.class,
-                new ParseField("schedule"),
-                ScheduleParser::parse);
+            Schedule.class,
+            new ParseField("schedule"),
+            ScheduleParser::parse
+        );
         registryEntries.add(scheduleEntry);
 
         return registryEntries;
     }
 
-    private JobSweeper initSweeper(Settings settings, Client client, ClusterService clusterService, ThreadPool threadPool,
-                                   NamedXContentRegistry registry, JobScheduler scheduler, LockService lockService) {
-        return new JobSweeper(settings, client, clusterService, threadPool, registry,
-                this.indexToJobProviders, scheduler, lockService);
+    private JobSweeper initSweeper(
+        Settings settings,
+        Client client,
+        ClusterService clusterService,
+        ThreadPool threadPool,
+        NamedXContentRegistry registry,
+        JobScheduler scheduler,
+        LockService lockService
+    ) {
+        return new JobSweeper(settings, client, clusterService, threadPool, registry, this.indexToJobProviders, scheduler, lockService);
     }
 }
diff --git a/src/main/java/org/opensearch/jobscheduler/JobSchedulerSettings.java b/src/main/java/org/opensearch/jobscheduler/JobSchedulerSettings.java
index 3d0267e4..fa9b8d31 100644
--- a/src/main/java/org/opensearch/jobscheduler/JobSchedulerSettings.java
+++ b/src/main/java/org/opensearch/jobscheduler/JobSchedulerSettings.java
@@ -1,8 +1,11 @@
 /*
  * Copyright OpenSearch Contributors
  * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
  */
-
 package org.opensearch.jobscheduler;
 
 import org.opensearch.common.settings.Setting;
@@ -10,32 +13,44 @@ public class JobSchedulerSettings {
 
     public static final Setting<TimeValue> REQUEST_TIMEOUT = Setting.positiveTimeSetting(
-            "plugins.jobscheduler.request_timeout",
-            LegacyOpenDistroJobSchedulerSettings.REQUEST_TIMEOUT,
-            Setting.Property.NodeScope, Setting.Property.Dynamic);
+        "plugins.jobscheduler.request_timeout",
+        LegacyOpenDistroJobSchedulerSettings.REQUEST_TIMEOUT,
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic
+    );
 
     public static final Setting<TimeValue> SWEEP_BACKOFF_MILLIS = Setting.positiveTimeSetting(
-            "plugins.jobscheduler.sweeper.backoff_millis",
-            LegacyOpenDistroJobSchedulerSettings.SWEEP_BACKOFF_MILLIS,
-            Setting.Property.NodeScope, Setting.Property.Dynamic);
+        "plugins.jobscheduler.sweeper.backoff_millis",
+        LegacyOpenDistroJobSchedulerSettings.SWEEP_BACKOFF_MILLIS,
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic
+    );
 
     public static final Setting<Integer> SWEEP_BACKOFF_RETRY_COUNT = Setting.intSetting(
-            "plugins.jobscheduler.retry_count",
-            LegacyOpenDistroJobSchedulerSettings.SWEEP_BACKOFF_RETRY_COUNT,
-            Setting.Property.NodeScope, Setting.Property.Dynamic);
+        "plugins.jobscheduler.retry_count",
+        LegacyOpenDistroJobSchedulerSettings.SWEEP_BACKOFF_RETRY_COUNT,
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic
+    );
 
     public static final Setting<TimeValue> SWEEP_PERIOD = Setting.positiveTimeSetting(
-            "plugins.jobscheduler.sweeper.period",
-            LegacyOpenDistroJobSchedulerSettings.SWEEP_PERIOD,
-            Setting.Property.NodeScope, Setting.Property.Dynamic);
+        "plugins.jobscheduler.sweeper.period",
+        LegacyOpenDistroJobSchedulerSettings.SWEEP_PERIOD,
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic
+    );
 
     public static final Setting<Integer> SWEEP_PAGE_SIZE = Setting.intSetting(
-            "plugins.jobscheduler.sweeper.page_size",
-            LegacyOpenDistroJobSchedulerSettings.SWEEP_PAGE_SIZE,
-            Setting.Property.NodeScope, Setting.Property.Dynamic);
+        "plugins.jobscheduler.sweeper.page_size",
+        LegacyOpenDistroJobSchedulerSettings.SWEEP_PAGE_SIZE,
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic
+    );
 
     public static final Setting<Double> JITTER_LIMIT = Setting.doubleSetting(
-            "plugins.jobscheduler.jitter_limit",
-            LegacyOpenDistroJobSchedulerSettings.JITTER_LIMIT,
-            Setting.Property.NodeScope, Setting.Property.Dynamic);
+        "plugins.jobscheduler.jitter_limit",
+        LegacyOpenDistroJobSchedulerSettings.JITTER_LIMIT,
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic
+    );
 }
diff --git a/src/main/java/org/opensearch/jobscheduler/LegacyOpenDistroJobSchedulerSettings.java b/src/main/java/org/opensearch/jobscheduler/LegacyOpenDistroJobSchedulerSettings.java
index fec0785f..ea185907 100644
--- a/src/main/java/org/opensearch/jobscheduler/LegacyOpenDistroJobSchedulerSettings.java
+++ b/src/main/java/org/opensearch/jobscheduler/LegacyOpenDistroJobSchedulerSettings.java
@@ -1,8 +1,11 @@
 /*
  * Copyright OpenSearch Contributors
  * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
  */
-
 package org.opensearch.jobscheduler;
 
 import org.opensearch.common.settings.Setting;
@@ -10,32 +13,52 @@ public class LegacyOpenDistroJobSchedulerSettings {
 
     public static final Setting<TimeValue> REQUEST_TIMEOUT = Setting.positiveTimeSetting(
-            "opendistro.jobscheduler.request_timeout",
-            TimeValue.timeValueSeconds(10),
-            Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated);
+        "opendistro.jobscheduler.request_timeout",
+        TimeValue.timeValueSeconds(10),
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic,
+        Setting.Property.Deprecated
+    );
 
     public static final Setting<TimeValue> SWEEP_BACKOFF_MILLIS = Setting.positiveTimeSetting(
-            "opendistro.jobscheduler.sweeper.backoff_millis",
-            TimeValue.timeValueMillis(50),
-            Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated);
+        "opendistro.jobscheduler.sweeper.backoff_millis",
+        TimeValue.timeValueMillis(50),
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic,
+        Setting.Property.Deprecated
+    );
 
     public static final Setting<Integer> SWEEP_BACKOFF_RETRY_COUNT = Setting.intSetting(
-            "opendistro.jobscheduler.retry_count",
-            3,
-            Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated);
+        "opendistro.jobscheduler.retry_count",
+        3,
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic,
+        Setting.Property.Deprecated
+    );
 
     public static final Setting<TimeValue> SWEEP_PERIOD = Setting.positiveTimeSetting(
-            "opendistro.jobscheduler.sweeper.period",
-            TimeValue.timeValueMinutes(5),
-            Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated);
+        "opendistro.jobscheduler.sweeper.period",
+        TimeValue.timeValueMinutes(5),
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic,
+        Setting.Property.Deprecated
+    );
 
     public static final Setting<Integer> SWEEP_PAGE_SIZE = Setting.intSetting(
-            "opendistro.jobscheduler.sweeper.page_size",
-            100,
-            Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated);
+        "opendistro.jobscheduler.sweeper.page_size",
+        100,
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic,
+        Setting.Property.Deprecated
+    );
 
     public static final Setting<Double> JITTER_LIMIT = Setting.doubleSetting(
-            "opendistro.jobscheduler.jitter_limit",
-            0.60, 0, 0.95,
-            Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated);
+        "opendistro.jobscheduler.jitter_limit",
+        0.60,
+        0,
+        0.95,
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic,
+        Setting.Property.Deprecated
+    );
 }
diff --git a/src/main/java/org/opensearch/jobscheduler/ScheduledJobProvider.java b/src/main/java/org/opensearch/jobscheduler/ScheduledJobProvider.java
index c338da58..6a263779 100644
---
a/src/main/java/org/opensearch/jobscheduler/ScheduledJobProvider.java +++ b/src/main/java/org/opensearch/jobscheduler/ScheduledJobProvider.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler; import org.opensearch.jobscheduler.spi.ScheduledJobParser; diff --git a/src/main/java/org/opensearch/jobscheduler/scheduler/JobScheduler.java b/src/main/java/org/opensearch/jobscheduler/scheduler/JobScheduler.java index a54b47fd..afbcf4ce 100644 --- a/src/main/java/org/opensearch/jobscheduler/scheduler/JobScheduler.java +++ b/src/main/java/org/opensearch/jobscheduler/scheduler/JobScheduler.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.scheduler; import org.opensearch.jobscheduler.JobSchedulerPlugin; @@ -60,8 +63,14 @@ public Set getScheduledJobIds(String indexName) { return this.scheduledJobInfo.getJobsByIndex(indexName).keySet(); } - public boolean schedule(String indexName, String docId, ScheduledJobParameter scheduledJobParameter, - ScheduledJobRunner jobRunner, JobDocVersion version, Double jitterLimit) { + public boolean schedule( + String indexName, + String docId, + ScheduledJobParameter scheduledJobParameter, + ScheduledJobRunner jobRunner, + JobDocVersion version, + Double jitterLimit + ) { if (!scheduledJobParameter.isEnabled()) { return false; } @@ -122,11 +131,15 @@ public boolean deschedule(String indexName, String id) { } @VisibleForTesting - boolean reschedule(ScheduledJobParameter jobParameter, JobSchedulingInfo jobInfo, ScheduledJobRunner jobRunner, - JobDocVersion version, Double jitterLimit) { + boolean reschedule( + ScheduledJobParameter jobParameter, + JobSchedulingInfo jobInfo, + ScheduledJobRunner jobRunner, + JobDocVersion version, + Double jitterLimit + ) { if (jobParameter.getEnabledTime() == null) { - log.info("There is no enable time of job {}, this job should never be scheduled.", - jobParameter.getName()); + log.info("There is no enable time of job {}, this job should never be scheduled.", jobParameter.getName()); return false; } @@ -170,8 +183,13 @@ boolean reschedule(ScheduledJobParameter jobParameter, JobSchedulingInfo jobInfo this.reschedule(jobParameter, jobInfo, jobRunner, version, jitterLimit); // invoke job runner - JobExecutionContext context = new JobExecutionContext(jobInfo.getExpectedPreviousExecutionTime(), version, lockService, - jobInfo.getIndexName(), jobInfo.getJobId()); + JobExecutionContext context = new JobExecutionContext( + jobInfo.getExpectedPreviousExecutionTime(), + version, + lockService, + jobInfo.getIndexName(), + jobInfo.getJobId() + ); jobRunner.runJob(jobParameter, context); }; @@ -180,8 +198,13 @@ boolean reschedule(ScheduledJobParameter jobParameter, JobSchedulingInfo jobInfo return false; } - jobInfo.setScheduledCancellable(this.threadPool.schedule(runnable, new TimeValue(duration.toNanos(), - TimeUnit.NANOSECONDS), JobSchedulerPlugin.OPEN_DISTRO_JOB_SCHEDULER_THREAD_POOL_NAME)); + jobInfo.setScheduledCancellable( + this.threadPool.schedule( + runnable, + new TimeValue(duration.toNanos(), TimeUnit.NANOSECONDS), + 
JobSchedulerPlugin.OPEN_DISTRO_JOB_SCHEDULER_THREAD_POOL_NAME + ) + ); return true; } diff --git a/src/main/java/org/opensearch/jobscheduler/scheduler/JobSchedulingInfo.java b/src/main/java/org/opensearch/jobscheduler/scheduler/JobSchedulingInfo.java index 22f1856e..f55c5dcc 100644 --- a/src/main/java/org/opensearch/jobscheduler/scheduler/JobSchedulingInfo.java +++ b/src/main/java/org/opensearch/jobscheduler/scheduler/JobSchedulingInfo.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.scheduler; import org.opensearch.jobscheduler.spi.ScheduledJobParameter; diff --git a/src/main/java/org/opensearch/jobscheduler/scheduler/ScheduledJobInfo.java b/src/main/java/org/opensearch/jobscheduler/scheduler/ScheduledJobInfo.java index 6fd375f0..814d02a5 100644 --- a/src/main/java/org/opensearch/jobscheduler/scheduler/ScheduledJobInfo.java +++ b/src/main/java/org/opensearch/jobscheduler/scheduler/ScheduledJobInfo.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler.scheduler; import java.util.Map; @@ -19,9 +22,9 @@ public class ScheduledJobInfo { } public Map getJobsByIndex(String indexName) { - if(!this.jobInfoMap.containsKey(indexName)) { + if (!this.jobInfoMap.containsKey(indexName)) { synchronized (this.jobInfoMap) { - if(!this.jobInfoMap.containsKey(indexName)) { + if (!this.jobInfoMap.containsKey(indexName)) { this.jobInfoMap.put(indexName, new ConcurrentHashMap<>()); } } @@ -34,9 +37,9 @@ public JobSchedulingInfo getJobInfo(String indexName, String jobId) { } public void addJob(String indexName, String jobId, JobSchedulingInfo jobInfo) { - if(!this.jobInfoMap.containsKey(indexName)) { + if (!this.jobInfoMap.containsKey(indexName)) { synchronized (this.jobInfoMap) { - if(!this.jobInfoMap.containsKey(indexName)) { + if (!this.jobInfoMap.containsKey(indexName)) { jobInfoMap.put(indexName, new ConcurrentHashMap<>()); } } @@ -46,7 +49,7 @@ public void addJob(String indexName, String jobId, JobSchedulingInfo jobInfo) { } public JobSchedulingInfo removeJob(String indexName, String jobId) { - if(this.jobInfoMap.containsKey(indexName)) { + if (this.jobInfoMap.containsKey(indexName)) { return this.jobInfoMap.get(indexName).remove(jobId); } diff --git a/src/main/java/org/opensearch/jobscheduler/sweeper/JobSweeper.java b/src/main/java/org/opensearch/jobscheduler/sweeper/JobSweeper.java index 9982299d..e7331462 100644 --- a/src/main/java/org/opensearch/jobscheduler/sweeper/JobSweeper.java +++ b/src/main/java/org/opensearch/jobscheduler/sweeper/JobSweeper.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
*/ - package org.opensearch.jobscheduler.sweeper; import org.opensearch.jobscheduler.JobSchedulerSettings; @@ -92,9 +95,16 @@ public class JobSweeper extends LifecycleListener implements IndexingOperationLi private volatile BackoffPolicy sweepSearchBackoff; private volatile Double jitterLimit; - public JobSweeper(Settings settings, Client client, ClusterService clusterService, ThreadPool threadPool, - NamedXContentRegistry registry, Map indexToProviders, JobScheduler scheduler, - LockService lockService) { + public JobSweeper( + Settings settings, + Client client, + ClusterService clusterService, + ThreadPool threadPool, + NamedXContentRegistry registry, + Map indexToProviders, + JobScheduler scheduler, + LockService lockService + ) { this.client = client; this.clusterService = clusterService; this.threadPool = threadPool; @@ -107,8 +117,7 @@ public JobSweeper(Settings settings, Client client, ClusterService clusterServic this.loadSettings(settings); this.addConfigListeners(); - this.fullSweepExecutor = Executors.newSingleThreadExecutor( - OpenSearchExecutors.daemonThreadFactory("opendistro_job_sweeper")); + this.fullSweepExecutor = Executors.newSingleThreadExecutor(OpenSearchExecutors.daemonThreadFactory("opendistro_job_sweeper")); this.sweptJobs = new ConcurrentHashMap<>(); } @@ -123,39 +132,33 @@ private void loadSettings(Settings settings) { } private void addConfigListeners() { - clusterService.getClusterSettings().addSettingsUpdateConsumer(JobSchedulerSettings.SWEEP_PERIOD, - timeValue -> { - sweepPeriod = timeValue; - log.debug("Reinitializing background full sweep with period: {}", this.sweepPeriod.getMinutes()); - initBackgroundSweep(); - }); - clusterService.getClusterSettings().addSettingsUpdateConsumer(JobSchedulerSettings.SWEEP_PAGE_SIZE, - intValue -> { - sweepPageMaxSize = intValue; - log.debug("Setting background sweep page size: {}", this.sweepPageMaxSize); - }); - clusterService.getClusterSettings().addSettingsUpdateConsumer(JobSchedulerSettings.REQUEST_TIMEOUT, - timeValue -> { - this.sweepSearchTimeout = timeValue; - log.debug("Setting background sweep search timeout: {}", this.sweepSearchTimeout.getMinutes()); - }); - clusterService.getClusterSettings().addSettingsUpdateConsumer(JobSchedulerSettings.SWEEP_BACKOFF_MILLIS, - timeValue -> { - this.sweepSearchBackoffMillis = timeValue; - this.sweepSearchBackoff = this.updateRetryPolicy(); - log.debug("Setting background sweep search backoff: {}", this.sweepSearchBackoffMillis.getMillis()); - }); - clusterService.getClusterSettings().addSettingsUpdateConsumer(JobSchedulerSettings.SWEEP_BACKOFF_RETRY_COUNT, - intValue -> { - this.sweepSearchBackoffRetryCount = intValue; - this.sweepSearchBackoff = this.updateRetryPolicy(); - log.debug("Setting background sweep search backoff retry count: {}", this.sweepSearchBackoffRetryCount); - }); - clusterService.getClusterSettings().addSettingsUpdateConsumer(JobSchedulerSettings.JITTER_LIMIT, - doubleValue -> { - this.jitterLimit = doubleValue; - log.debug("Setting background sweep jitter limit: {}", this.jitterLimit); - }); + clusterService.getClusterSettings().addSettingsUpdateConsumer(JobSchedulerSettings.SWEEP_PERIOD, timeValue -> { + sweepPeriod = timeValue; + log.debug("Reinitializing background full sweep with period: {}", this.sweepPeriod.getMinutes()); + initBackgroundSweep(); + }); + clusterService.getClusterSettings().addSettingsUpdateConsumer(JobSchedulerSettings.SWEEP_PAGE_SIZE, intValue -> { + sweepPageMaxSize = intValue; + log.debug("Setting background sweep page size: 
{}", this.sweepPageMaxSize); + }); + clusterService.getClusterSettings().addSettingsUpdateConsumer(JobSchedulerSettings.REQUEST_TIMEOUT, timeValue -> { + this.sweepSearchTimeout = timeValue; + log.debug("Setting background sweep search timeout: {}", this.sweepSearchTimeout.getMinutes()); + }); + clusterService.getClusterSettings().addSettingsUpdateConsumer(JobSchedulerSettings.SWEEP_BACKOFF_MILLIS, timeValue -> { + this.sweepSearchBackoffMillis = timeValue; + this.sweepSearchBackoff = this.updateRetryPolicy(); + log.debug("Setting background sweep search backoff: {}", this.sweepSearchBackoffMillis.getMillis()); + }); + clusterService.getClusterSettings().addSettingsUpdateConsumer(JobSchedulerSettings.SWEEP_BACKOFF_RETRY_COUNT, intValue -> { + this.sweepSearchBackoffRetryCount = intValue; + this.sweepSearchBackoff = this.updateRetryPolicy(); + log.debug("Setting background sweep search backoff retry count: {}", this.sweepSearchBackoffRetryCount); + }); + clusterService.getClusterSettings().addSettingsUpdateConsumer(JobSchedulerSettings.JITTER_LIMIT, doubleValue -> { + this.jitterLimit = doubleValue; + log.debug("Setting background sweep jitter limit: {}", this.jitterLimit); + }); } private BackoffPolicy updateRetryPolicy() { @@ -204,8 +207,9 @@ public void postIndex(ShardId shardId, Engine.Index index, Engine.IndexResult re @Override public void postDelete(ShardId shardId, Engine.Delete delete, Engine.DeleteResult result) { if (result.getResultType() == Engine.Result.Type.FAILURE) { - ConcurrentHashMap shardJobs = this.sweptJobs.containsKey(shardId) ? - this.sweptJobs.get(shardId) : new ConcurrentHashMap<>(); + ConcurrentHashMap shardJobs = this.sweptJobs.containsKey(shardId) + ? this.sweptJobs.get(shardId) + : new ConcurrentHashMap<>(); JobDocVersion version = shardJobs.get(delete.id()); log.debug("Deletion failed for scheduled job {}. Continuing with current version {}", delete.id(), version); return; @@ -214,10 +218,13 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Engine.DeleteResul if (this.scheduler.getScheduledJobIds(shardId.getIndexName()).contains(delete.id())) { log.info("Descheduling job {} on index {}", delete.id(), shardId.getIndexName()); this.scheduler.deschedule(shardId.getIndexName(), delete.id()); - lockService.deleteLock(LockModel.generateLockId(shardId.getIndexName(), delete.id()), ActionListener.wrap( + lockService.deleteLock( + LockModel.generateLockId(shardId.getIndexName(), delete.id()), + ActionListener.wrap( deleted -> log.debug("Deleted lock: {}", deleted), exception -> log.debug("Failed to delete lock", exception) - )); + ) + ); } } @@ -242,8 +249,12 @@ void sweep(ShardId shardId, String docId, BytesReference jobSource, JobDocVersio if (jobSource != null) { try { ScheduledJobProvider provider = this.indexToProviders.get(shardId.getIndexName()); - XContentParser parser = XContentHelper.createParser(this.xContentRegistry, LoggingDeprecationHandler.INSTANCE, - jobSource, XContentType.JSON); + XContentParser parser = XContentHelper.createParser( + this.xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + jobSource, + XContentType.JSON + ); ScheduledJobParameter jobParameter = provider.getJobParser().parse(parser, docId, jobDocVersion); if (jobParameter == null) { // allow parser to return null, which means this is not a scheduled job document. 
@@ -297,13 +308,16 @@ private TimeValue getFullSweepElapsedTime() { private Map> getLocalShards(ClusterState clusterState, String localNodeId, String indexName) { List allShards = clusterState.routingTable().allShards(indexName); // group shards by shard id - Map> shards = allShards.stream().filter(ShardRouting::active) - .collect(Collectors.groupingBy(ShardRouting::shardId, - Collectors.mapping(shardRouting -> shardRouting, Collectors.toList()))); + Map> shards = allShards.stream() + .filter(ShardRouting::active) + .collect(Collectors.groupingBy(ShardRouting::shardId, Collectors.mapping(shardRouting -> shardRouting, Collectors.toList()))); // filter out shards not on local node - return shards.entrySet().stream().filter((entry) -> entry.getValue().stream() - .filter((shardRouting -> shardRouting.currentNodeId().equals(localNodeId))).count() > 0) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + return shards.entrySet() + .stream() + .filter( + (entry) -> entry.getValue().stream().filter((shardRouting -> shardRouting.currentNodeId().equals(localNodeId))).count() > 0 + ) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } private void sweepAllJobIndices() { @@ -333,9 +347,12 @@ private void sweepIndex(String indexName) { while (sweptJobIter.hasNext()) { Map.Entry> entry = sweptJobIter.next(); if (entry.getKey().getIndexName().equals(indexName) && !localShards.containsKey(entry.getKey())) { - log.info("Descheduling jobs of shard {} index {} as the shard is removed from this node.", - entry.getKey().getId(), indexName); - //shard is removed, deschedule jobs of this shard + log.info( + "Descheduling jobs of shard {} index {} as the shard is removed from this node.", + entry.getKey().getId(), + indexName + ); + // shard is removed, deschedule jobs of this shard this.scheduler.bulkDeschedule(indexName, entry.getValue().keySet()); sweptJobIter.remove(); } @@ -354,8 +371,9 @@ private void sweepIndex(String indexName) { } private void sweepShard(ShardId shardId, ShardNodes shardNodes, String startAfter) { - ConcurrentHashMap currentJobs = this.sweptJobs.containsKey(shardId) ? - this.sweptJobs.get(shardId) : new ConcurrentHashMap<>(); + ConcurrentHashMap currentJobs = this.sweptJobs.containsKey(shardId) + ? this.sweptJobs.get(shardId) + : new ConcurrentHashMap<>(); for (String jobId : currentJobs.keySet()) { if (!shardNodes.isOwningNode(jobId)) { @@ -366,19 +384,22 @@ private void sweepShard(ShardId shardId, ShardNodes shardNodes, String startAfte String searchAfter = startAfter == null ? 
"" : startAfter; while (searchAfter != null) { - SearchRequest jobSearchRequest = new SearchRequest() - .indices(shardId.getIndexName()) - .preference("_shards:" + shardId.id() + "|_only_local") - .source(new SearchSourceBuilder() - .version(true) - .seqNoAndPrimaryTerm(true) - .sort(new FieldSortBuilder("_id").unmappedType("keyword").missing("_last")) - .searchAfter(new String[]{searchAfter}) - .size(this.sweepPageMaxSize) - .query(QueryBuilders.matchAllQuery())); - - SearchResponse response = this.retry((searchRequest) -> this.client.search(searchRequest), - jobSearchRequest, this.sweepSearchBackoff).actionGet(this.sweepSearchTimeout); + SearchRequest jobSearchRequest = new SearchRequest().indices(shardId.getIndexName()) + .preference("_shards:" + shardId.id() + "|_only_local") + .source( + new SearchSourceBuilder().version(true) + .seqNoAndPrimaryTerm(true) + .sort(new FieldSortBuilder("_id").unmappedType("keyword").missing("_last")) + .searchAfter(new String[] { searchAfter }) + .size(this.sweepPageMaxSize) + .query(QueryBuilders.matchAllQuery()) + ); + + SearchResponse response = this.retry( + (searchRequest) -> this.client.search(searchRequest), + jobSearchRequest, + this.sweepSearchBackoff + ).actionGet(this.sweepSearchTimeout); if (response.status() != RestStatus.OK) { log.error("Error sweeping shard {}, failed querying jobs on this shard", shardId); return; @@ -386,8 +407,12 @@ private void sweepShard(ShardId shardId, ShardNodes shardNodes, String startAfte for (SearchHit hit : response.getHits()) { String jobId = hit.getId(); if (shardNodes.isOwningNode(jobId)) { - this.sweep(shardId, jobId, hit.getSourceRef(), new JobDocVersion(hit.getPrimaryTerm(), hit.getSeqNo(), - hit.getVersion())); + this.sweep( + shardId, + jobId, + hit.getSourceRef(), + new JobDocVersion(hit.getPrimaryTerm(), hit.getSeqNo(), hit.getVersion()) + ); } } if (response.getHits() == null || response.getHits().getHits().length < 1) { @@ -400,8 +425,11 @@ private void sweepShard(ShardId shardId, ShardNodes shardNodes, String startAfte } private R retry(Function function, T param, BackoffPolicy backoffPolicy) { - Set retryalbeStatus = Sets.newHashSet(RestStatus.BAD_GATEWAY, RestStatus.GATEWAY_TIMEOUT, - RestStatus.SERVICE_UNAVAILABLE); + Set retryalbeStatus = Sets.newHashSet( + RestStatus.BAD_GATEWAY, + RestStatus.GATEWAY_TIMEOUT, + RestStatus.SERVICE_UNAVAILABLE + ); Iterator iter = backoffPolicy.iterator(); do { try { @@ -443,8 +471,9 @@ boolean isOwningNode(String jobId) { return false; } int jobHashCode = Murmur3HashFunction.hash(jobId); - String nodeId = this.circle.higherEntry(jobHashCode) == null ? this.circle.firstEntry().getValue() - : this.circle.higherEntry(jobHashCode).getValue(); + String nodeId = this.circle.higherEntry(jobHashCode) == null + ? this.circle.firstEntry().getValue() + : this.circle.higherEntry(jobHashCode).getValue(); return this.localNodeId.equals(nodeId); } } diff --git a/src/main/java/org/opensearch/jobscheduler/utils/VisibleForTesting.java b/src/main/java/org/opensearch/jobscheduler/utils/VisibleForTesting.java index d38ac66f..fbd8b2f6 100644 --- a/src/main/java/org/opensearch/jobscheduler/utils/VisibleForTesting.java +++ b/src/main/java/org/opensearch/jobscheduler/utils/VisibleForTesting.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
  */
-
 package org.opensearch.jobscheduler.utils;
 
 public @interface VisibleForTesting {
 }
diff --git a/src/test/java/org/opensearch/jobscheduler/JobSchedulerPluginIT.java b/src/test/java/org/opensearch/jobscheduler/JobSchedulerPluginIT.java
index 4785b69b..266e2336 100644
--- a/src/test/java/org/opensearch/jobscheduler/JobSchedulerPluginIT.java
+++ b/src/test/java/org/opensearch/jobscheduler/JobSchedulerPluginIT.java
@@ -1,8 +1,11 @@
 /*
  * Copyright OpenSearch Contributors
  * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
  */
-
 package org.opensearch.jobscheduler;
 
 import org.junit.Assert;
@@ -30,12 +33,13 @@ public void testPluginsAreInstalled() {
         NodesInfoRequest nodesInfoRequest = new NodesInfoRequest();
         nodesInfoRequest.addMetric(NodesInfoRequest.Metric.PLUGINS.metricName());
-        NodesInfoResponse nodesInfoResponse = OpenSearchIntegTestCase.client().admin().cluster().nodesInfo(nodesInfoRequest)
-            .actionGet();
-        List<PluginInfo> pluginInfos = nodesInfoResponse.getNodes().stream()
-            .flatMap((Function<NodeInfo, Stream<PluginInfo>>) nodeInfo -> nodeInfo.getInfo(PluginsAndModules.class)
-                .getPluginInfos().stream()).collect(Collectors.toList());
-        Assert.assertTrue(pluginInfos.stream().anyMatch(pluginInfo -> pluginInfo.getName()
-            .equals("opensearch-job-scheduler")));
+        NodesInfoResponse nodesInfoResponse = OpenSearchIntegTestCase.client().admin().cluster().nodesInfo(nodesInfoRequest).actionGet();
+        List<PluginInfo> pluginInfos = nodesInfoResponse.getNodes()
+            .stream()
+            .flatMap(
+                (Function<NodeInfo, Stream<PluginInfo>>) nodeInfo -> nodeInfo.getInfo(PluginsAndModules.class).getPluginInfos().stream()
+            )
+            .collect(Collectors.toList());
+        Assert.assertTrue(pluginInfos.stream().anyMatch(pluginInfo -> pluginInfo.getName().equals("opensearch-job-scheduler")));
     }
-}
\ No newline at end of file
+}
diff --git a/src/test/java/org/opensearch/jobscheduler/JobSchedulerPluginRestIT.java b/src/test/java/org/opensearch/jobscheduler/JobSchedulerPluginRestIT.java
index 85e62806..7044ee6b 100644
--- a/src/test/java/org/opensearch/jobscheduler/JobSchedulerPluginRestIT.java
+++ b/src/test/java/org/opensearch/jobscheduler/JobSchedulerPluginRestIT.java
@@ -1,8 +1,11 @@
 /*
  * Copyright OpenSearch Contributors
  * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
*/ - package org.opensearch.jobscheduler; import org.junit.Assert; @@ -23,9 +26,15 @@ public class JobSchedulerPluginRestIT extends OpenSearchRestTestCase { public void testPluginsAreInstalled() throws IOException { Request request = new Request("GET", "/_cat/plugins?s=component&h=name,component,version,description&format=json"); Response response = client().performRequest(request); - List pluginsList = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, response.getEntity().getContent()).list(); - Assert.assertTrue(pluginsList.stream().map(o -> (Map) o).anyMatch(plugin -> plugin.get("component") - .equals("opensearch-job-scheduler"))); + List pluginsList = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.getEntity().getContent() + ).list(); + Assert.assertTrue( + pluginsList.stream() + .map(o -> (Map) o) + .anyMatch(plugin -> plugin.get("component").equals("opensearch-job-scheduler")) + ); } -} \ No newline at end of file +} diff --git a/src/test/java/org/opensearch/jobscheduler/JobSchedulerSettingsTests.java b/src/test/java/org/opensearch/jobscheduler/JobSchedulerSettingsTests.java index f3fcbfa2..d0642d35 100644 --- a/src/test/java/org/opensearch/jobscheduler/JobSchedulerSettingsTests.java +++ b/src/test/java/org/opensearch/jobscheduler/JobSchedulerSettingsTests.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. */ - package org.opensearch.jobscheduler; import java.util.Arrays; @@ -14,11 +17,11 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.test.OpenSearchTestCase; -@SuppressWarnings({"rawtypes"}) +@SuppressWarnings({ "rawtypes" }) public class JobSchedulerSettingsTests extends OpenSearchTestCase { JobSchedulerPlugin plugin; - + @Before public void setup() { this.plugin = new JobSchedulerPlugin(); @@ -26,7 +29,8 @@ public void setup() { public void testAllLegacyOpenDistroSettingsReturned() { List> settings = plugin.getSettings(); - assertTrue("legacy setting must be returned from settings", + assertTrue( + "legacy setting must be returned from settings", settings.containsAll( Arrays.asList( LegacyOpenDistroJobSchedulerSettings.JITTER_LIMIT, @@ -42,7 +46,8 @@ public void testAllLegacyOpenDistroSettingsReturned() { public void testAllOpenSearchSettingsReturned() { List> settings = plugin.getSettings(); - assertTrue("legacy setting must be returned from settings", + assertTrue( + "legacy setting must be returned from settings", settings.containsAll( Arrays.asList( JobSchedulerSettings.JITTER_LIMIT, @@ -58,15 +63,15 @@ public void testAllOpenSearchSettingsReturned() { public void testLegacyOpenDistroSettingsFallback() { assertEquals( - JobSchedulerSettings.REQUEST_TIMEOUT.get(Settings.EMPTY), + JobSchedulerSettings.REQUEST_TIMEOUT.get(Settings.EMPTY), LegacyOpenDistroJobSchedulerSettings.REQUEST_TIMEOUT.get(Settings.EMPTY) ); } public void testSettingsGetValue() { Settings settings = Settings.builder().put("plugins.jobscheduler.request_timeout", "42s").build(); - assertEquals(JobSchedulerSettings.REQUEST_TIMEOUT.get(settings), TimeValue.timeValueSeconds(42)); - assertEquals(LegacyOpenDistroJobSchedulerSettings.REQUEST_TIMEOUT.get(settings), TimeValue.timeValueSeconds(10)); + assertEquals(JobSchedulerSettings.REQUEST_TIMEOUT.get(settings), 
TimeValue.timeValueSeconds(42)); + assertEquals(LegacyOpenDistroJobSchedulerSettings.REQUEST_TIMEOUT.get(settings), TimeValue.timeValueSeconds(10)); } public void testSettingsGetValueWithLegacyFallback() { @@ -77,22 +82,23 @@ public void testSettingsGetValueWithLegacyFallback() { .put("opendistro.jobscheduler.sweeper.period", "4s") .put("opendistro.jobscheduler.sweeper.page_size", 5) .put("opendistro.jobscheduler.jitter_limit", 6) - .build(); - - assertEquals(JobSchedulerSettings.REQUEST_TIMEOUT.get(settings), TimeValue.timeValueSeconds(1)); - assertEquals(JobSchedulerSettings.SWEEP_BACKOFF_MILLIS.get(settings), TimeValue.timeValueMillis(2)); - assertEquals(JobSchedulerSettings.SWEEP_BACKOFF_RETRY_COUNT.get(settings), Integer.valueOf(3)); - assertEquals(JobSchedulerSettings.SWEEP_PERIOD.get(settings), TimeValue.timeValueSeconds(4)); - assertEquals(JobSchedulerSettings.SWEEP_PAGE_SIZE.get(settings), Integer.valueOf(5)); - assertEquals(JobSchedulerSettings.JITTER_LIMIT.get(settings), Double.valueOf(6.0)); + .build(); - assertSettingDeprecationsAndWarnings(new Setting[]{ - LegacyOpenDistroJobSchedulerSettings.REQUEST_TIMEOUT, - LegacyOpenDistroJobSchedulerSettings.SWEEP_BACKOFF_MILLIS, - LegacyOpenDistroJobSchedulerSettings.SWEEP_BACKOFF_RETRY_COUNT, - LegacyOpenDistroJobSchedulerSettings.SWEEP_PERIOD, - LegacyOpenDistroJobSchedulerSettings.SWEEP_PAGE_SIZE, - LegacyOpenDistroJobSchedulerSettings.JITTER_LIMIT - }); + assertEquals(JobSchedulerSettings.REQUEST_TIMEOUT.get(settings), TimeValue.timeValueSeconds(1)); + assertEquals(JobSchedulerSettings.SWEEP_BACKOFF_MILLIS.get(settings), TimeValue.timeValueMillis(2)); + assertEquals(JobSchedulerSettings.SWEEP_BACKOFF_RETRY_COUNT.get(settings), Integer.valueOf(3)); + assertEquals(JobSchedulerSettings.SWEEP_PERIOD.get(settings), TimeValue.timeValueSeconds(4)); + assertEquals(JobSchedulerSettings.SWEEP_PAGE_SIZE.get(settings), Integer.valueOf(5)); + assertEquals(JobSchedulerSettings.JITTER_LIMIT.get(settings), Double.valueOf(6.0)); + + assertSettingDeprecationsAndWarnings( + new Setting[] { + LegacyOpenDistroJobSchedulerSettings.REQUEST_TIMEOUT, + LegacyOpenDistroJobSchedulerSettings.SWEEP_BACKOFF_MILLIS, + LegacyOpenDistroJobSchedulerSettings.SWEEP_BACKOFF_RETRY_COUNT, + LegacyOpenDistroJobSchedulerSettings.SWEEP_PERIOD, + LegacyOpenDistroJobSchedulerSettings.SWEEP_PAGE_SIZE, + LegacyOpenDistroJobSchedulerSettings.JITTER_LIMIT } + ); } } diff --git a/src/test/java/org/opensearch/jobscheduler/scheduler/JobSchedulerTests.java b/src/test/java/org/opensearch/jobscheduler/scheduler/JobSchedulerTests.java index 8c051bf3..86ad5ee8 100644 --- a/src/test/java/org/opensearch/jobscheduler/scheduler/JobSchedulerTests.java +++ b/src/test/java/org/opensearch/jobscheduler/scheduler/JobSchedulerTests.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
*/ - package org.opensearch.jobscheduler.scheduler; import org.opensearch.jobscheduler.spi.JobDocVersion; @@ -28,7 +31,7 @@ import java.util.List; @RunWith(RandomizedRunner.class) -@SuppressWarnings({"rawtypes"}) +@SuppressWarnings({ "rawtypes" }) public class JobSchedulerTests extends OpenSearchTestCase { private ThreadPool threadPool; @@ -47,15 +50,20 @@ public void testSchedule() { Schedule schedule = Mockito.mock(Schedule.class); ScheduledJobRunner runner = Mockito.mock(ScheduledJobRunner.class); - ScheduledJobParameter jobParameter = buildScheduledJobParameter("job-id", "dummy job name", - Instant.now().minus(1, ChronoUnit.HOURS), Instant.now(), schedule, true); + ScheduledJobParameter jobParameter = buildScheduledJobParameter( + "job-id", + "dummy job name", + Instant.now().minus(1, ChronoUnit.HOURS), + Instant.now(), + schedule, + true + ); Mockito.when(schedule.getNextExecutionTime(Mockito.any())).thenReturn(Instant.now().plus(1, ChronoUnit.MINUTES)); Scheduler.ScheduledCancellable cancellable = Mockito.mock(Scheduler.ScheduledCancellable.class); Mockito.when(this.threadPool.schedule(Mockito.any(), Mockito.any(), Mockito.anyString())).thenReturn(cancellable); - boolean scheduled = this.scheduler.schedule("index", "job-id", jobParameter, runner, dummyVersion, jitterLimit); Assert.assertTrue(scheduled); Mockito.verify(this.threadPool, Mockito.times(1)).schedule(Mockito.any(), Mockito.any(), Mockito.anyString()); @@ -67,9 +75,14 @@ public void testSchedule() { } public void testSchedule_disabledJob() { - ScheduledJobParameter jobParameter = buildScheduledJobParameter("job-id", "dummy job name", - Instant.now().minus(1, ChronoUnit.HOURS), Instant.now(), - new CronSchedule("* * * * *", ZoneId.systemDefault()), false); + ScheduledJobParameter jobParameter = buildScheduledJobParameter( + "job-id", + "dummy job name", + Instant.now().minus(1, ChronoUnit.HOURS), + Instant.now(), + new CronSchedule("* * * * *", ZoneId.systemDefault()), + false + ); boolean scheduled = this.scheduler.schedule("index-name", "job-id", jobParameter, null, dummyVersion, jitterLimit); Assert.assertFalse(scheduled); } @@ -126,15 +139,21 @@ public void testDeschedule_noSuchJob() { } public void testReschedule_noEnableTime() { - ScheduledJobParameter jobParameter = buildScheduledJobParameter("job-id", "dummy job name", - null, null, null, false); + ScheduledJobParameter jobParameter = buildScheduledJobParameter("job-id", "dummy job name", null, null, null, false); Assert.assertFalse(this.scheduler.reschedule(jobParameter, null, null, dummyVersion, jitterLimit)); } public void testReschedule_jobDescheduled() { Schedule schedule = Mockito.mock(Schedule.class); - ScheduledJobParameter jobParameter = buildScheduledJobParameter("job-id", "dummy job name", - Instant.now().minus(1, ChronoUnit.HOURS), Instant.now(), schedule, false, 0.6); + ScheduledJobParameter jobParameter = buildScheduledJobParameter( + "job-id", + "dummy job name", + Instant.now().minus(1, ChronoUnit.HOURS), + Instant.now(), + schedule, + false, + 0.6 + ); JobSchedulingInfo jobSchedulingInfo = new JobSchedulingInfo("job-index", "job-id", jobParameter); Instant now = Instant.now(); jobSchedulingInfo.setDescheduled(true); @@ -148,8 +167,15 @@ public void testReschedule_jobDescheduled() { public void testReschedule_scheduleJob() { Schedule schedule = Mockito.mock(Schedule.class); - ScheduledJobParameter jobParameter = buildScheduledJobParameter("job-id", "dummy job name", - Instant.now().minus(1, ChronoUnit.HOURS), Instant.now(), schedule, false, 0.6); + 
ScheduledJobParameter jobParameter = buildScheduledJobParameter( + "job-id", + "dummy job name", + Instant.now().minus(1, ChronoUnit.HOURS), + Instant.now(), + schedule, + false, + 0.6 + ); JobSchedulingInfo jobSchedulingInfo = new JobSchedulingInfo("job-index", "job-id", jobParameter); Instant now = Instant.now(); jobSchedulingInfo.setDescheduled(false); @@ -166,13 +192,26 @@ public void testReschedule_scheduleJob() { Mockito.verify(this.threadPool).schedule(Mockito.any(), Mockito.any(), Mockito.anyString()); } - static ScheduledJobParameter buildScheduledJobParameter(String id, String name, Instant updateTime, - Instant enableTime, Schedule schedule, boolean enabled) { + static ScheduledJobParameter buildScheduledJobParameter( + String id, + String name, + Instant updateTime, + Instant enableTime, + Schedule schedule, + boolean enabled + ) { return buildScheduledJobParameter(id, name, updateTime, enableTime, schedule, enabled, null); } - static ScheduledJobParameter buildScheduledJobParameter(String id, String name, Instant updateTime, - Instant enableTime, Schedule schedule, boolean enabled, Double jitter) { + static ScheduledJobParameter buildScheduledJobParameter( + String id, + String name, + Instant updateTime, + Instant enableTime, + Schedule schedule, + boolean enabled, + Double jitter + ) { return new ScheduledJobParameter() { @Override public String getName() { @@ -199,7 +238,8 @@ public boolean isEnabled() { return enabled; } - @Override public Double getJitter() { + @Override + public Double getJitter() { return jitter; } diff --git a/src/test/java/org/opensearch/jobscheduler/sweeper/JobSweeperTests.java b/src/test/java/org/opensearch/jobscheduler/sweeper/JobSweeperTests.java index 56b5a939..f42b5d03 100644 --- a/src/test/java/org/opensearch/jobscheduler/sweeper/JobSweeperTests.java +++ b/src/test/java/org/opensearch/jobscheduler/sweeper/JobSweeperTests.java @@ -1,8 +1,11 @@ /* * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
*/ - package org.opensearch.jobscheduler.sweeper; import org.opensearch.jobscheduler.JobSchedulerSettings; @@ -60,7 +63,7 @@ import java.util.Map; import java.util.Set; -@SuppressWarnings({"unchecked", "rawtypes"}) +@SuppressWarnings({ "unchecked", "rawtypes" }) public class JobSweeperTests extends OpenSearchAllocationTestCase { private Client client; @@ -87,7 +90,7 @@ public void setup() throws IOException { this.jobParser = Mockito.mock(ScheduledJobParser.class); // NamedXContentRegistry.Entry xContentRegistryEntry = new NamedXContentRegistry.Entry(ScheduledJobParameter.class, - // new ParseField("JOB_TYPE"), this.jobParser); + // new ParseField("JOB_TYPE"), this.jobParser); List namedXContentRegistryEntries = new ArrayList<>(); // namedXContentRegistryEntries.add(xContentRegistryEntry); this.xContentRegistry = new NamedXContentRegistry(namedXContentRegistryEntries); @@ -109,13 +112,20 @@ public void setup() throws IOException { ClusterService originClusterService = ClusterServiceUtils.createClusterService(this.threadPool, discoveryNode, clusterSettings); this.clusterService = Mockito.spy(originClusterService); - ScheduledJobProvider jobProvider = new ScheduledJobProvider("JOB_TYPE", "job-index-name", - this.jobParser, this.jobRunner); + ScheduledJobProvider jobProvider = new ScheduledJobProvider("JOB_TYPE", "job-index-name", this.jobParser, this.jobRunner); Map jobProviderMap = new HashMap<>(); jobProviderMap.put("index-name", jobProvider); - sweeper = new JobSweeper(settings, this.client, this.clusterService, this.threadPool, xContentRegistry, - jobProviderMap, scheduler, new LockService(client, clusterService)); + sweeper = new JobSweeper( + settings, + this.client, + this.clusterService, + this.threadPool, + xContentRegistry, + jobProviderMap, + scheduler, + new LockService(client, clusterService) + ); } public void testAfterStart() { @@ -132,8 +142,7 @@ public void testInitBackgroundSweep() { this.sweeper.initBackgroundSweep(); Mockito.verify(cancellable).cancel(); - Mockito.verify(this.threadPool, Mockito.times(2)) - .scheduleWithFixedDelay(Mockito.any(), Mockito.any(), Mockito.anyString()); + Mockito.verify(this.threadPool, Mockito.times(2)).scheduleWithFixedDelay(Mockito.any(), Mockito.any(), Mockito.anyString()); } public void testBeforeStop() { @@ -157,41 +166,39 @@ public void testPostIndex() { Engine.Index index = this.getIndexOperation(); Engine.IndexResult indexResult = new Engine.IndexResult(1L, 1L, 1L, true); - Metadata metadata = Metadata.builder() - .put(createIndexMetadata("index-name", 1, 3)) - .build(); - RoutingTable routingTable = new RoutingTable.Builder() - .add(new IndexRoutingTable.Builder(metadata.index("index-name").getIndex()) - .initializeAsNew(metadata.index("index-name")).build()) - .build(); + Metadata metadata = Metadata.builder().put(createIndexMetadata("index-name", 1, 3)).build(); + RoutingTable routingTable = new RoutingTable.Builder().add( + new IndexRoutingTable.Builder(metadata.index("index-name").getIndex()).initializeAsNew(metadata.index("index-name")).build() + ).build(); ClusterState clusterState = ClusterState.builder(new ClusterName("cluster-name")) - .metadata(metadata) - .routingTable(routingTable) - .build(); + .metadata(metadata) + .routingTable(routingTable) + .build(); clusterState = this.addNodesToCluter(clusterState, 2); clusterState = this.initializeAllShards(clusterState); OngoingStubbing stubbing = null; Iterator iter = clusterState.getNodes().iterator(); - while(iter.hasNext()) { - if(stubbing == null) { + while (iter.hasNext()) 
{ + if (stubbing == null) { stubbing = Mockito.when(this.clusterService.localNode()).thenReturn(iter.next()); - }else { + } else { stubbing = stubbing.thenReturn(iter.next()); } } Mockito.when(this.clusterService.state()).thenReturn(clusterState); JobSweeper testSweeper = Mockito.spy(this.sweeper); - Mockito.doNothing().when(testSweeper).sweep(Mockito.any(), Mockito.anyString(), Mockito.any(BytesReference.class), - Mockito.any(JobDocVersion.class)); - for (int i = 0; i actionFuture = Mockito.mock(ActionFuture.class); Mockito.when(this.client.delete(Mockito.any())).thenReturn(actionFuture); - DeleteResponse response = new DeleteResponse(new ShardId(new Index("name","uuid"), 0), "id", 1L, 2L, 3L, true); + DeleteResponse response = new DeleteResponse(new ShardId(new Index("name", "uuid"), 0), "id", 1L, 2L, 3L, true); Mockito.when(actionFuture.actionGet()).thenReturn(response); this.sweeper.postDelete(shardId, delete, deleteResult); @@ -237,8 +244,15 @@ public void testSweep() throws IOException { ShardId shardId = new ShardId(new Index("index-name", IndexMetadata.INDEX_UUID_NA_VALUE), 1); this.sweeper.sweep(shardId, "id", this.getTestJsonSource(), new JobDocVersion(1L, 1L, 2L)); - Mockito.verify(this.scheduler, Mockito.times(0)).schedule(Mockito.anyString(), Mockito.anyString(), - Mockito.any(), Mockito.any(), Mockito.any(JobDocVersion.class), Mockito.any(Double.class)); + Mockito.verify(this.scheduler, Mockito.times(0)) + .schedule( + Mockito.anyString(), + Mockito.anyString(), + Mockito.any(), + Mockito.any(), + Mockito.any(JobDocVersion.class), + Mockito.any(Double.class) + ); ScheduledJobParameter mockJobParameter = Mockito.mock(ScheduledJobParameter.class); Mockito.when(mockJobParameter.isEnabled()).thenReturn(true); @@ -246,13 +260,20 @@ public void testSweep() throws IOException { .thenReturn(mockJobParameter); this.sweeper.sweep(shardId, "id", this.getTestJsonSource(), new JobDocVersion(1L, 1L, 2L)); - Mockito.verify(this.scheduler).schedule(Mockito.anyString(), Mockito.anyString(), Mockito.any(), Mockito.any(), - Mockito.any(JobDocVersion.class), Mockito.any(Double.class)); + Mockito.verify(this.scheduler) + .schedule( + Mockito.anyString(), + Mockito.anyString(), + Mockito.any(), + Mockito.any(), + Mockito.any(JobDocVersion.class), + Mockito.any(Double.class) + ); } private ClusterState addNodesToCluter(ClusterState clusterState, int nodeCount) { DiscoveryNodes.Builder nodeBuilder = DiscoveryNodes.builder(); - for (int i = 1; i<=nodeCount; i++) { + for (int i = 1; i <= nodeCount; i++) { nodeBuilder.add(OpenSearchAllocationTestCase.newNode("node-" + i)); } @@ -260,15 +281,21 @@ private ClusterState addNodesToCluter(ClusterState clusterState, int nodeCount) } private ClusterState initializeAllShards(ClusterState clusterState) { - AllocationService allocationService = createAllocationService(Settings.builder() + AllocationService allocationService = createAllocationService( + Settings.builder() .put("cluster.routing.allocation.node_concurrent_recoveries", Integer.MAX_VALUE) .put("cluster.routing.allocation.node_initial_parimaries_recoveries", Integer.MAX_VALUE) - .build()); + .build() + ); clusterState = allocationService.reroute(clusterState, "reroute"); - clusterState = allocationService.applyStartedShards(clusterState, clusterState.getRoutingNodes() - .shardsWithState("index-name", ShardRoutingState.INITIALIZING)); // start primary shard - clusterState = allocationService.applyStartedShards(clusterState, clusterState.getRoutingNodes() - .shardsWithState("index-name", 
ShardRoutingState.INITIALIZING)); // start replica shards + clusterState = allocationService.applyStartedShards( + clusterState, + clusterState.getRoutingNodes().shardsWithState("index-name", ShardRoutingState.INITIALIZING) + ); // start primary shard + clusterState = allocationService.applyStartedShards( + clusterState, + clusterState.getRoutingNodes().shardsWithState("index-name", ShardRoutingState.INITIALIZING) + ); // start replica shards return clusterState; } @@ -297,28 +324,27 @@ private Engine.Delete getDeleteOperation(String docId) { } private BytesReference getTestJsonSource() { - return new BytesArray("{\n" + - "\t\"id\": \"id\",\n" + - "\t\"name\": \"name\",\n" + - "\t\"version\": 3,\n" + - "\t\"enabled\": true,\n" + - "\t\"schedule\": {\n" + - "\t\t\"cron\": {\n" + - "\t\t\t\"expression\": \"* * * * *\",\n" + - "\t\t\t\"timezone\": \"PST8PDT\"\n" + - "\t\t}\n" + - "\t},\n" + - "\t\"sample_param\": \"sample parameter\",\n" + - "\t\"enable_time\": 1550105987448,\n" + - "\t\"last_update_time\": 1550105987448\n" + - "}"); + return new BytesArray( + "{\n" + + "\t\"id\": \"id\",\n" + + "\t\"name\": \"name\",\n" + + "\t\"version\": 3,\n" + + "\t\"enabled\": true,\n" + + "\t\"schedule\": {\n" + + "\t\t\"cron\": {\n" + + "\t\t\t\"expression\": \"* * * * *\",\n" + + "\t\t\t\"timezone\": \"PST8PDT\"\n" + + "\t\t}\n" + + "\t},\n" + + "\t\"sample_param\": \"sample parameter\",\n" + + "\t\"enable_time\": 1550105987448,\n" + + "\t\"last_update_time\": 1550105987448\n" + + "}" + ); } private IndexMetadata.Builder createIndexMetadata(String indexName, int replicaNumber, int shardNumber) { Settings defaultSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build(); - return new IndexMetadata.Builder(indexName) - .settings(defaultSettings) - .numberOfReplicas(replicaNumber) - .numberOfShards(shardNumber); + return new IndexMetadata.Builder(indexName).settings(defaultSettings).numberOfReplicas(replicaNumber).numberOfShards(shardNumber); } }
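
For context on the settings tests reformatted above: JobSchedulerSettingsTests exercises a fallback from the legacy opendistro.jobscheduler.* keys to the newer plugins.jobscheduler.* keys. Below is a minimal sketch of how such a fallback setting is commonly declared with the OpenSearch Setting API; the class name JobSchedulerSettingsSketch, the field name, and the keys are illustrative assumptions that mirror the tests, since the real declarations live in JobSchedulerSettings and LegacyOpenDistroJobSchedulerSettings and are not part of this diff.

    import org.opensearch.common.settings.Setting;
    import org.opensearch.common.unit.TimeValue;

    // Sketch only (assumed, not taken from this patch): a plugins.* setting that
    // falls back to the plugin's legacy opendistro.* setting when the new key is absent.
    // LegacyOpenDistroJobSchedulerSettings is the plugin's existing class and is assumed
    // to be on the classpath / in the same package here.
    public class JobSchedulerSettingsSketch {
        public static final Setting<TimeValue> REQUEST_TIMEOUT = Setting.timeSetting(
            "plugins.jobscheduler.request_timeout",
            LegacyOpenDistroJobSchedulerSettings.REQUEST_TIMEOUT, // fallback when the new key is not set
            Setting.Property.NodeScope,
            Setting.Property.Dynamic
        );
    }

Declared this way, JobSchedulerSettings.REQUEST_TIMEOUT.get(settings) returns 42s in testSettingsGetValue (the new key is set) and 1s in testSettingsGetValueWithLegacyFallback (only the legacy key is set), while reading the legacy keys (assuming they are declared with Setting.Property.Deprecated) produces the deprecation warnings checked by assertSettingDeprecationsAndWarnings.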