From a754afc03e7552de9a10e16c010e679fb46e5669 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Fri, 4 Feb 2022 17:23:46 +0100 Subject: [PATCH 01/16] isPipelineRunning method --- .../source/class/osparc/data/model/Study.js | 10 ++++++++ .../class/osparc/desktop/StartStopButtons.js | 24 ++++++------------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/services/web/client/source/class/osparc/data/model/Study.js b/services/web/client/source/class/osparc/data/model/Study.js index 799f45413d5..60754e7401d 100644 --- a/services/web/client/source/class/osparc/data/model/Study.js +++ b/services/web/client/source/class/osparc/data/model/Study.js @@ -390,6 +390,16 @@ qx.Class.define("osparc.data.model.Study", { return null; }, + isPipelineRunning: function() { + const pipelineState = this.getPipelineState(); + return [ + "PUBLISHED", + "PENDING", + "STARTED", + "RETRY" + ].includes(pipelineState); + }, + isLocked: function() { if (this.getState() && "locked" in this.getState()) { return this.getState()["locked"]["value"]; diff --git a/services/web/client/source/class/osparc/desktop/StartStopButtons.js b/services/web/client/source/class/osparc/desktop/StartStopButtons.js index 34cd91cf930..7e13b8221ba 100644 --- a/services/web/client/source/class/osparc/desktop/StartStopButtons.js +++ b/services/web/client/source/class/osparc/desktop/StartStopButtons.js @@ -208,23 +208,13 @@ qx.Class.define("osparc.desktop.StartStopButtons", { if (study) { const startButtons = this.__getStartButtons(); const stopButton = this.__stopButton; - const pipelineState = study.getPipelineState(); - if (pipelineState) { - switch (pipelineState) { - case "PENDING": - case "PUBLISHED": - case "STARTED": - startButtons.forEach(startButton => startButton.setFetching(true)); - stopButton.setEnabled(true); - break; - case "NOT_STARTED": - case "SUCCESS": - case "FAILED": - default: - startButtons.forEach(startButton => startButton.setFetching(false)); - stopButton.setEnabled(false); - break; - } + const isPipelineRunning = study.isPipelineRunning(); + if (isPipelineRunning) { + startButtons.forEach(startButton => startButton.setFetching(true)); + stopButton.setEnabled(true); + } else { + startButtons.forEach(startButton => startButton.setFetching(false)); + stopButton.setEnabled(false); } } } From 33700cf699c4a8b54dcdce8c52354612c179bc87 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Fri, 4 Feb 2022 18:10:52 +0100 Subject: [PATCH 02/16] disable forms if pipeline is running --- .../source/class/osparc/data/model/Study.js | 22 ++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/services/web/client/source/class/osparc/data/model/Study.js b/services/web/client/source/class/osparc/data/model/Study.js index 60754e7401d..152de4a4e61 100644 --- a/services/web/client/source/class/osparc/data/model/Study.js +++ b/services/web/client/source/class/osparc/data/model/Study.js @@ -57,12 +57,14 @@ qx.Class.define("osparc.data.model.Study", { quality: studyData.quality || this.getQuality() }); - const wbData = studyData.workbench || this.getWorkbench(); + const wbData = studyData.workbench || {}; const workbench = new osparc.data.model.Workbench(wbData, studyData.ui); this.setWorkbench(workbench); workbench.setStudy(this); this.setUi(new osparc.data.model.StudyUI(studyData.ui)); + + this.__applyState(); }, properties: { @@ -126,7 +128,7 @@ qx.Class.define("osparc.data.model.Study", { workbench: { check: "osparc.data.model.Workbench", nullable: false, - init: {} + init: null }, ui: { @@ -158,7 +160,8 @@ 
qx.Class.define("osparc.data.model.Study", { state: { check: "Object", nullable: true, - event: "changeState" + event: "changeState", + apply: "__applyState" }, readOnly: { @@ -424,6 +427,19 @@ qx.Class.define("osparc.data.model.Study", { } }, + __applyState: function() { + if (this.getWorkbench() === null) { + return; + } + const isPipelineRunning = this.isPipelineRunning(); + const nodes = this.getWorkbench().getNodes(true); + for (const node of Object.values(nodes)) { + if (node.isPropertyInitialized("propsForm") && node.getPropsForm()) { + node.getPropsForm().setEnabled(!isPipelineRunning); + } + } + }, + openStudy: function() { const params = { url: { From 1e6d04205dc7da20da6c124f4ea069f561761aab Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Fri, 4 Feb 2022 18:12:05 +0100 Subject: [PATCH 03/16] minor --- tests/performance/locust_files/director_services.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/performance/locust_files/director_services.py b/tests/performance/locust_files/director_services.py index 1743004e082..d7e63a900f0 100644 --- a/tests/performance/locust_files/director_services.py +++ b/tests/performance/locust_files/director_services.py @@ -22,7 +22,7 @@ def __init__(self, *args, **kwargs): @task() def get_services(self): self.client.get( - "v0/services?user_id=" + self.user_id, + f"v0/services?user_id={self.user_id}", headers={ "x-simcore-products-name": "osparc", }, From 66b176420a6507ce3dd4071e51318e4fd735c325 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Fri, 4 Feb 2022 18:20:46 +0100 Subject: [PATCH 04/16] logic moved to note --- .../source/class/osparc/data/model/Node.js | 12 +++++++++++- .../source/class/osparc/data/model/Study.js | 18 +----------------- 2 files changed, 12 insertions(+), 18 deletions(-) diff --git a/services/web/client/source/class/osparc/data/model/Node.js b/services/web/client/source/class/osparc/data/model/Node.js index db091cc77a6..4022abbf27f 100644 --- a/services/web/client/source/class/osparc/data/model/Node.js +++ b/services/web/client/source/class/osparc/data/model/Node.js @@ -156,7 +156,8 @@ qx.Class.define("osparc.data.model.Node", { propsForm: { check: "osparc.component.form.renderer.PropForm", init: null, - nullable: true + nullable: true, + apply: "__applyPropsForm" }, propsFormEditor: { @@ -513,6 +514,15 @@ qx.Class.define("osparc.data.model.Node", { } }, + __applyPropsForm: function() { + const checkIsPipelineRunning = () => { + const isPipelineRunning = this.getStudy().isPipelineRunning(); + this.getPropsForm().setEnabled(!isPipelineRunning); + }; + this.getStudy().addListener("changeState", () => checkIsPipelineRunning(), this); + checkIsPipelineRunning(); + }, + getInputsDefaultWidget: function() { return this.__inputsDefaultWidget; }, diff --git a/services/web/client/source/class/osparc/data/model/Study.js b/services/web/client/source/class/osparc/data/model/Study.js index 152de4a4e61..3b00e5c1558 100644 --- a/services/web/client/source/class/osparc/data/model/Study.js +++ b/services/web/client/source/class/osparc/data/model/Study.js @@ -63,8 +63,6 @@ qx.Class.define("osparc.data.model.Study", { workbench.setStudy(this); this.setUi(new osparc.data.model.StudyUI(studyData.ui)); - - this.__applyState(); }, properties: { @@ -160,8 +158,7 @@ qx.Class.define("osparc.data.model.Study", { state: { check: "Object", nullable: true, - event: "changeState", - apply: "__applyState" + event: "changeState" }, readOnly: { @@ -427,19 +424,6 @@ qx.Class.define("osparc.data.model.Study", { } }, - __applyState: 
function() { - if (this.getWorkbench() === null) { - return; - } - const isPipelineRunning = this.isPipelineRunning(); - const nodes = this.getWorkbench().getNodes(true); - for (const node of Object.values(nodes)) { - if (node.isPropertyInitialized("propsForm") && node.getPropsForm()) { - node.getPropsForm().setEnabled(!isPipelineRunning); - } - } - }, - openStudy: function() { const params = { url: { From 9f398674f4e3648662e7acbf607c6a5cd32a2573 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 4 Feb 2022 18:31:57 +0100 Subject: [PATCH 05/16] moves state logic to model --- .../src/models_library/projects_state.py | 11 +++++++++++ .../simcore_service_director_v2/utils/computations.py | 9 ++------- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/packages/models-library/src/models_library/projects_state.py b/packages/models-library/src/models_library/projects_state.py index b3aecce93cb..867d9a7fc41 100644 --- a/packages/models-library/src/models_library/projects_state.py +++ b/packages/models-library/src/models_library/projects_state.py @@ -27,6 +27,17 @@ class RunningState(str, Enum): FAILED = "FAILED" ABORTED = "ABORTED" + def is_running(self) -> bool: + return self in ( + RunningState.PUBLISHED, + RunningState.PENDING, + RunningState.STARTED, + RunningState.RETRY, + ) + + def is_stopped(self) -> bool: + return not self.is_running() + @unique class DataState(str, Enum): diff --git a/services/director-v2/src/simcore_service_director_v2/utils/computations.py b/services/director-v2/src/simcore_service_director_v2/utils/computations.py index 7a0f427386f..8281bf9eb4b 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/computations.py @@ -88,13 +88,8 @@ def to_node_class(service_key: str) -> NodeClass: def is_pipeline_running(pipeline_state: RunningState) -> bool: - return pipeline_state in [ - RunningState.PUBLISHED, - RunningState.PENDING, - RunningState.STARTED, - RunningState.RETRY, - ] + return pipeline_state.is_running() def is_pipeline_stopped(pipeline_state: RunningState) -> bool: - return not is_pipeline_running(pipeline_state) + return pipeline_state.is_stopped() From 657d541ad807c09df2a2a04c3b21267ef78b95ae Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 4 Feb 2022 18:34:37 +0100 Subject: [PATCH 06/16] tmp --- .pre-commit-config.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 65b6a5518eb..36b7d2b7cf8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,7 +16,6 @@ repos: rev: v1.1.0 hooks: - id: pycln - args: [--all] - repo: https://github.com/PyCQA/isort rev: 5.6.4 hooks: From 927a6fd4e80173133ec6958b871d4deffd849756 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 4 Feb 2022 18:34:46 +0100 Subject: [PATCH 07/16] stops from saving project if pipeline still runs --- .../director_v2_api.py | 2 ++ .../director_v2_core.py | 15 +++++++++++++++ .../projects/projects_handlers.py | 17 +++++++++++++++++ 3 files changed, 34 insertions(+) diff --git a/services/web/server/src/simcore_service_webserver/director_v2_api.py b/services/web/server/src/simcore_service_webserver/director_v2_api.py index 92918d15500..b3a5806da14 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2_api.py +++ 
b/services/web/server/src/simcore_service_webserver/director_v2_api.py @@ -18,6 +18,7 @@ get_service_state, get_services, is_healthy, + is_pipeline_running, request_retrieve_dyn_service, restart, retrieve, @@ -37,6 +38,7 @@ "get_service_state", "get_services", "is_healthy", + "is_pipeline_running", "request_retrieve_dyn_service", "restart", "retrieve", diff --git a/services/web/server/src/simcore_service_webserver/director_v2_core.py b/services/web/server/src/simcore_service_webserver/director_v2_core.py index d9e3dae8a2f..9af6dede284 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2_core.py +++ b/services/web/server/src/simcore_service_webserver/director_v2_core.py @@ -211,6 +211,21 @@ async def create_or_update_pipeline( log.error("could not create pipeline from project %s: %s", project_id, exc) +@log_decorator(logger=log) +async def is_pipeline_running( + app: web.Application, user_id: PositiveInt, project_id: UUID +) -> bool: + + # TODO: make it cheaper by /computations/{project_id}/state + pipeline = await get_computation_task(app, user_id, project_id) + if pipeline is None: + # the pipeline might not exist and that is ok + # FIXME: or some error happened so ... we assume is not running?? + return False # not running + + return pipeline.state.is_running() + + @log_decorator(logger=log) async def get_computation_task( app: web.Application, user_id: PositiveInt, project_id: UUID diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py index 413c0bc71d0..3d64f771c8b 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py @@ -406,6 +406,23 @@ async def replace_project(request: web.Request): if current_project["accessRights"] != new_project["accessRights"]: await check_permission(request, "project.access_rights.update") + # NOTE: This measure avoid having a state with different nodes in the + # comptask table and the project's workbench column. + # The limitation is that nodeports only "sees" those in the comptask + # which are blocked while the pipeline runs. Then + # any extra link created while the pipelin is running can not + # be managed by nodeports because it basically "cannot see it" + # + # This is a conservative approach until nodeports logic is modified + # to tackle this state. + # + if await director_v2_api.is_pipeline_running( + request.app, user_id, project_uuid + ): + raise web.HTTPConflict( + reason="Project cannot be modified while pipeline is still running." + ) + new_project = await db.replace_user_project( new_project, user_id, f"{project_uuid}", include_templates=True ) From e446c955407514a1e2248502dd476d62183e1899 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 4 Feb 2022 18:35:23 +0100 Subject: [PATCH 08/16] Revert "tmp" This reverts commit 657d541ad807c09df2a2a04c3b21267ef78b95ae. 
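Note: patches 05 to 07 above move the running-state logic onto the RunningState model and use it in the webserver to reject project updates with HTTP 409 while a pipeline runs. Below is a minimal sketch of that pattern, not the exact webserver code; the `_get_pipeline_state` stub and the simplified signatures are assumptions for illustration only.

```python
# Minimal sketch of the guard wired in by patches 05-07 (simplified, not the real module).
from typing import Optional
from uuid import UUID

from aiohttp import web
from models_library.projects_state import RunningState  # gains is_running() in patch 05


async def _get_pipeline_state(
    app: web.Application, user_id: int, project_id: UUID
) -> Optional[RunningState]:
    # placeholder: the real helper asks director-v2 via get_computation_task()
    # and returns the pipeline state, or None when no pipeline exists
    return None


async def is_pipeline_running(
    app: web.Application, user_id: int, project_id: UUID
) -> bool:
    state = await _get_pipeline_state(app, user_id, project_id)
    # a missing pipeline is treated as "not running"
    return state is not None and state.is_running()


async def guard_replace_project(
    request: web.Request, user_id: int, project_uuid: UUID
) -> None:
    # called from replace_project() before persisting the new workbench, so the
    # comp_tasks table and the project's workbench column cannot drift apart
    if await is_pipeline_running(request.app, user_id, project_uuid):
        raise web.HTTPConflict(
            reason="Project cannot be modified while pipeline is still running."
        )
```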
--- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 36b7d2b7cf8..65b6a5518eb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,6 +16,7 @@ repos: rev: v1.1.0 hooks: - id: pycln + args: [--all] - repo: https://github.com/PyCQA/isort rev: 5.6.4 hooks: From 8276811a9fc9b7f4651999b0e7e0f17b7a7b180c Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 4 Feb 2022 19:04:26 +0100 Subject: [PATCH 09/16] doc --- .../projects/projects_handlers.py | 28 ++++++++++++------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py index 3d64f771c8b..013265bbb5d 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py @@ -406,19 +406,27 @@ async def replace_project(request: web.Request): if current_project["accessRights"] != new_project["accessRights"]: await check_permission(request, "project.access_rights.update") - # NOTE: This measure avoid having a state with different nodes in the - # comptask table and the project's workbench column. - # The limitation is that nodeports only "sees" those in the comptask - # which are blocked while the pipeline runs. Then - # any extra link created while the pipelin is running can not - # be managed by nodeports because it basically "cannot see it" - # - # This is a conservative approach until nodeports logic is modified - # to tackle this state. - # if await director_v2_api.is_pipeline_running( request.app, user_id, project_uuid ): + # NOTE: This is a conservative measure that we take + # until nodeports logic is re-designed to tackle with this + # particular state. + # + # This measure avoid having a state with different node *links* in the + # comp-tasks table and the project's workbench column. + # The limitation is that nodeports only "sees" those in the comptask + # and this table does not add the new ones since it remains "blocked" + # for modification from that project while the pipeline runs. Therefore + # any extra link created while the pipeline is running can not + # be managed by nodeports because it basically "cannot see it" + # + # Responds https://httpstatuses.com/409: + # The request could not be completed due to a conflict with the current + # state of the target resource (i.e. pipeline is running). This code is used in + # situations where the user might be able to resolve the conflict + # and resubmit the request (front-end will show a pop-up with message below) + # raise web.HTTPConflict( reason="Project cannot be modified while pipeline is still running." 
) From b95093c4aa8a3cfb19c394de7206e31b3756b23e Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 4 Feb 2022 22:03:04 +0100 Subject: [PATCH 10/16] Minor rename and started error handling --- .../api/routes/computations.py | 102 +++++++++--------- .../core/application.py | 16 +++ .../models/schemas/comp_tasks.py | 2 +- .../integration/01/test_computation_api.py | 22 ++-- ...t_dynamic_sidecar_nodeports_integration.py | 4 +- .../tests/integration/shared_comp_utils.py | 10 +- 6 files changed, 86 insertions(+), 70 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py index 5c6d505a12b..69e89bf61cd 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py @@ -15,13 +15,13 @@ from tenacity.stop import stop_after_delay from tenacity.wait import wait_random -from ...core.errors import PipelineNotFoundError, ProjectNotFoundError, SchedulerError +from ...core.errors import ProjectNotFoundError, SchedulerError from ...models.domains.comp_pipelines import CompPipelineAtDB from ...models.domains.comp_tasks import CompTaskAtDB from ...models.schemas.comp_tasks import ( ComputationTaskCreate, ComputationTaskDelete, - ComputationTaskOut, + ComputationTaskGet, ComputationTaskStop, ) from ...models.schemas.constants import UserID @@ -55,7 +55,7 @@ @router.post( "", summary="Create and optionally start a new computation", - response_model=ComputationTaskOut, + response_model=ComputationTaskGet, status_code=status.HTTP_201_CREATED, ) # NOTE: in case of a burst of calls to that endpoint, we might end up in a weird state. 
@@ -72,7 +72,7 @@ async def create_computation( ), director_client: DirectorV0Client = Depends(get_director_v0_client), scheduler: BaseCompScheduler = Depends(get_scheduler), -) -> ComputationTaskOut: +) -> ComputationTaskGet: log.debug( "User %s is creating a new computation from project %s", job.user_id, @@ -149,7 +149,7 @@ async def create_computation( ] pipeline_state = get_pipeline_state_from_task_states(filtered_tasks) - return ComputationTaskOut( + return ComputationTaskGet( id=job.project_id, state=pipeline_state, pipeline_details=await compute_pipeline_details( @@ -168,7 +168,7 @@ async def create_computation( @router.get( "/{project_id}", summary="Returns a computation pipeline state", - response_model=ComputationTaskOut, + response_model=ComputationTaskGet, status_code=status.HTTP_202_ACCEPTED, ) async def get_computation( @@ -182,60 +182,60 @@ async def get_computation( computation_tasks: CompTasksRepository = Depends( get_repository(CompTasksRepository) ), -) -> ComputationTaskOut: - log.debug("User %s getting computation status for project %s", user_id, project_id) - try: - # check that project actually exists - await project_repo.get_project(project_id) +) -> ComputationTaskGet: + log.debug( + "User %s getting computation status for project %s", + f"{user_id=}", + f"{project_id=}", + ) - # NOTE: Here it is assumed the project exists in comp_tasks/comp_pipeline - # get the project pipeline - pipeline_at_db: CompPipelineAtDB = await computation_pipelines.get_pipeline( - project_id - ) - pipeline_dag: nx.DiGraph = pipeline_at_db.get_graph() + # check that project actually exists + await project_repo.get_project(project_id) - # get the project task states - all_tasks: List[CompTaskAtDB] = await computation_tasks.get_all_tasks( - project_id - ) - # create the complete DAG graph - complete_dag = create_complete_dag_from_tasks(all_tasks) + # NOTE: Here it is assumed the project exists in comp_tasks/comp_pipeline + # get the project pipeline + pipeline_at_db: CompPipelineAtDB = await computation_pipelines.get_pipeline( + project_id + ) + pipeline_dag: nx.DiGraph = pipeline_at_db.get_graph() - # filter the tasks by the effective pipeline - filtered_tasks = [ - t for t in all_tasks if str(t.node_id) in list(pipeline_dag.nodes()) - ] - pipeline_state = get_pipeline_state_from_task_states(filtered_tasks) + # get the project task states + all_tasks: List[CompTaskAtDB] = await computation_tasks.get_all_tasks(project_id) - log.debug( - "Computational task status by user %s for project %s is %s", - user_id, - project_id, - pipeline_state, - ) + # filter the tasks by the effective pipeline + filtered_tasks = [ + t for t in all_tasks if str(t.node_id) in list(pipeline_dag.nodes()) + ] + pipeline_state = get_pipeline_state_from_task_states(filtered_tasks) - task_out = ComputationTaskOut( - id=project_id, - state=pipeline_state, - pipeline_details=await compute_pipeline_details( - complete_dag, pipeline_dag, all_tasks - ), - url=f"{request.url.remove_query_params('user_id')}", - stop_url=f"{request.url.remove_query_params('user_id')}:stop" - if is_pipeline_running(pipeline_state) - else None, - ) - return task_out + log.debug( + "Computational task status by %s for %s has %s", + f"{user_id=}", + f"{project_id=}", + f"{pipeline_state=}", + ) - except (ProjectNotFoundError, PipelineNotFoundError) as e: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e + # create the complete DAG graph + complete_dag = create_complete_dag_from_tasks(all_tasks) + pipeline_details = 
await compute_pipeline_details( + complete_dag, pipeline_dag, all_tasks + ) + self_url = f"{request.url.remove_query_params('user_id')}" + + task_out = ComputationTaskGet( + id=project_id, + state=pipeline_state, + pipeline_details=pipeline_details, + url=self_url, + stop_url=f"{self_url}:stop" if pipeline_state.is_running() else None, + ) + return task_out @router.post( "/{project_id}:stop", summary="Stops a computation pipeline", - response_model=ComputationTaskOut, + response_model=ComputationTaskGet, status_code=status.HTTP_202_ACCEPTED, ) async def stop_computation_project( @@ -250,7 +250,7 @@ async def stop_computation_project( get_repository(CompTasksRepository) ), scheduler: BaseCompScheduler = Depends(get_scheduler), -) -> ComputationTaskOut: +) -> ComputationTaskGet: log.debug( "User %s stopping computation for project %s", comp_task_stop.user_id, @@ -277,7 +277,7 @@ async def stop_computation_project( if is_pipeline_running(pipeline_state): await scheduler.stop_pipeline(comp_task_stop.user_id, project_id) - return ComputationTaskOut( + return ComputationTaskGet( id=project_id, state=pipeline_state, pipeline_details=await compute_pipeline_details( diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py index b2982cf74c8..f1f46cbe98b 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/application.py +++ b/services/director-v2/src/simcore_service_director_v2/core/application.py @@ -26,6 +26,7 @@ remote_debug, ) from ..utils.logging_utils import config_all_loggers +from .errors import PipelineNotFoundError, ProjectNotFoundError from .events import on_shutdown, on_startup from .settings import AppSettings, BootModeEnum @@ -86,8 +87,23 @@ def init_app(settings: Optional[AppSettings] = None) -> FastAPI: # setup app -- app.add_event_handler("startup", on_startup) app.add_event_handler("shutdown", on_shutdown) + app.add_exception_handler(HTTPException, http_error_handler) app.add_exception_handler(RequestValidationError, http422_error_handler) + # director-v2 core.errors mappend into HTTP errors + app.add_exception_handler( + ProjectNotFoundError, + make_http_error_handler_for_exception( + status.HTTP_404_NOT_FOUND, ProjectNotFoundError + ), + ) + app.add_exception_handler( + PipelineNotFoundError, + make_http_error_handler_for_exception( + status.HTTP_404_NOT_FOUND, PipelineNotFoundError + ), + ) + # SEE https://docs.python.org/3/library/exceptions.html#exception-hierarchy app.add_exception_handler( NotImplementedError, diff --git a/services/director-v2/src/simcore_service_director_v2/models/schemas/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/schemas/comp_tasks.py index 0abfe3909ec..91cf81eff93 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/schemas/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/schemas/comp_tasks.py @@ -9,7 +9,7 @@ from ..schemas.constants import UserID -class ComputationTaskOut(ComputationTask): +class ComputationTaskGet(ComputationTask): url: AnyHttpUrl = Field( ..., description="the link where to get the status of the task" ) diff --git a/services/director-v2/tests/integration/01/test_computation_api.py b/services/director-v2/tests/integration/01/test_computation_api.py index bef42d5a9d4..cc133adcb1c 100644 --- a/services/director-v2/tests/integration/01/test_computation_api.py +++ b/services/director-v2/tests/integration/01/test_computation_api.py @@ 
-33,7 +33,7 @@ create_pipeline, ) from simcore_sdk.node_ports_common import config as node_ports_config -from simcore_service_director_v2.models.schemas.comp_tasks import ComputationTaskOut +from simcore_service_director_v2.models.schemas.comp_tasks import ComputationTaskGet from starlette import status from starlette.testclient import TestClient from yarl import URL @@ -393,7 +393,7 @@ def _convert_to_pipeline_details( if index in params.subgraph_elements ], ) - task_out = ComputationTaskOut.parse_obj(response.json()) + task_out = ComputationTaskGet.parse_obj(response.json()) # check the contents is correctb await assert_computation_task_out_obj( async_client, @@ -455,7 +455,7 @@ def _convert_to_pipeline_details( ], force_restart=True, ) - task_out = ComputationTaskOut.parse_obj(response.json()) + task_out = ComputationTaskGet.parse_obj(response.json()) await assert_computation_task_out_obj( async_client, @@ -490,7 +490,7 @@ async def test_run_computation( start_pipeline=True, expected_response_status_code=status.HTTP_201_CREATED, ) - task_out = ComputationTaskOut.parse_obj(response.json()) + task_out = ComputationTaskGet.parse_obj(response.json()) # check the contents is correct: a pipeline that just started gets PUBLISHED await assert_computation_task_out_obj( @@ -553,7 +553,7 @@ async def test_run_computation( expected_response_status_code=status.HTTP_201_CREATED, force_restart=True, ) - task_out = ComputationTaskOut.parse_obj(response.json()) + task_out = ComputationTaskGet.parse_obj(response.json()) # check the contents is correct await assert_computation_task_out_obj( async_client, @@ -600,7 +600,7 @@ async def test_abort_computation( start_pipeline=True, expected_response_status_code=status.HTTP_201_CREATED, ) - task_out = ComputationTaskOut.parse_obj(response.json()) + task_out = ComputationTaskGet.parse_obj(response.json()) # check the contents is correctb await assert_computation_task_out_obj( @@ -640,7 +640,7 @@ async def test_abort_computation( assert ( response.status_code == status.HTTP_202_ACCEPTED ), f"response code is {response.status_code}, error: {response.text}" - task_out = ComputationTaskOut.parse_obj(response.json()) + task_out = ComputationTaskGet.parse_obj(response.json()) assert ( str(task_out.url) == f"{async_client.base_url}/v2/computations/{sleepers_project.uuid}" @@ -678,7 +678,7 @@ async def test_update_and_delete_computation( start_pipeline=False, expected_response_status_code=status.HTTP_201_CREATED, ) - task_out = ComputationTaskOut.parse_obj(response.json()) + task_out = ComputationTaskGet.parse_obj(response.json()) # check the contents is correctb await assert_computation_task_out_obj( @@ -697,7 +697,7 @@ async def test_update_and_delete_computation( start_pipeline=False, expected_response_status_code=status.HTTP_201_CREATED, ) - task_out = ComputationTaskOut.parse_obj(response.json()) + task_out = ComputationTaskGet.parse_obj(response.json()) # check the contents is correctb await assert_computation_task_out_obj( @@ -716,7 +716,7 @@ async def test_update_and_delete_computation( start_pipeline=False, expected_response_status_code=status.HTTP_201_CREATED, ) - task_out = ComputationTaskOut.parse_obj(response.json()) + task_out = ComputationTaskGet.parse_obj(response.json()) # check the contents is correctb await assert_computation_task_out_obj( @@ -735,7 +735,7 @@ async def test_update_and_delete_computation( start_pipeline=True, expected_response_status_code=status.HTTP_201_CREATED, ) - task_out = ComputationTaskOut.parse_obj(response.json()) + task_out 
= ComputationTaskGet.parse_obj(response.json()) # check the contents is correctb await assert_computation_task_out_obj( async_client, diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py index 9ac34fe9dd7..465f2899a4e 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py +++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py @@ -56,7 +56,7 @@ from simcore_sdk.node_ports_common import config as node_ports_config from simcore_sdk.node_ports_v2 import DBManager, Nodeports, Port from simcore_service_director_v2.core.settings import AppSettings, RCloneSettings -from simcore_service_director_v2.models.schemas.comp_tasks import ComputationTaskOut +from simcore_service_director_v2.models.schemas.comp_tasks import ComputationTaskGet from simcore_service_director_v2.models.schemas.constants import ( DYNAMIC_SIDECAR_SERVICE_PREFIX, ) @@ -853,7 +853,7 @@ async def test_nodeports_integration( start_pipeline=True, expected_response_status_code=status.HTTP_201_CREATED, ) - task_out = ComputationTaskOut.parse_obj(response.json()) + task_out = ComputationTaskGet.parse_obj(response.json()) # check the contents is correct: a pipeline that just started gets PUBLISHED await assert_computation_task_out_obj( diff --git a/services/director-v2/tests/integration/shared_comp_utils.py b/services/director-v2/tests/integration/shared_comp_utils.py index cc06b67f01c..c5b44be06de 100644 --- a/services/director-v2/tests/integration/shared_comp_utils.py +++ b/services/director-v2/tests/integration/shared_comp_utils.py @@ -10,7 +10,7 @@ from pydantic.networks import AnyHttpUrl from pydantic.types import PositiveInt from pytest_simcore.helpers.constants import MINUTE -from simcore_service_director_v2.models.schemas.comp_tasks import ComputationTaskOut +from simcore_service_director_v2.models.schemas.comp_tasks import ComputationTaskGet from starlette import status from tenacity._asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type @@ -46,7 +46,7 @@ async def create_pipeline( async def assert_computation_task_out_obj( client: httpx.AsyncClient, - task_out: ComputationTaskOut, + task_out: ComputationTaskGet, *, project: ProjectAtDB, exp_task_state: RunningState, @@ -70,7 +70,7 @@ async def assert_and_wait_for_pipeline_status( user_id: PositiveInt, project_uuid: UUID, wait_for_states: List[RunningState] = None, -) -> ComputationTaskOut: +) -> ComputationTaskGet: if not wait_for_states: wait_for_states = [ RunningState.SUCCESS, @@ -79,12 +79,12 @@ async def assert_and_wait_for_pipeline_status( ] MAX_TIMEOUT_S = 5 * MINUTE - async def check_pipeline_state() -> ComputationTaskOut: + async def check_pipeline_state() -> ComputationTaskGet: response = await client.get(url, params={"user_id": user_id}) assert ( response.status_code == status.HTTP_202_ACCEPTED ), f"response code is {response.status_code}, error: {response.text}" - task_out = ComputationTaskOut.parse_obj(response.json()) + task_out = ComputationTaskGet.parse_obj(response.json()) assert task_out.id == project_uuid assert task_out.url == f"{client.base_url}/v2/computations/{project_uuid}" print( From fabac001b86cec6b3f656d8b82be3478012dd71c Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Mon, 7 Feb 2022 14:24:19 +0100 Subject: [PATCH 11/16] Update StartStopButtons.js --- 
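Note: patch 10 above also drops the per-route try/except in get_computation in favour of exception handlers registered on the FastAPI app, mapping director-v2 domain errors to HTTP 404. A minimal sketch of that registration pattern follows; the body of make_http_error_handler_for_exception and the stand-in error classes are assumptions, since the diff only shows the add_exception_handler calls.

```python
# Sketch of the error-mapping pattern registered in core/application.py (patch 10).
from fastapi import FastAPI, Request, status
from fastapi.responses import JSONResponse


class ProjectNotFoundError(Exception):
    """stand-in for simcore_service_director_v2.core.errors.ProjectNotFoundError"""


class PipelineNotFoundError(Exception):
    """stand-in for simcore_service_director_v2.core.errors.PipelineNotFoundError"""


def make_http_error_handler_for_exception(status_code: int, exc_cls: type):
    # assumed factory: returns a handler that renders the domain error as JSON
    async def _handler(request: Request, exc: Exception) -> JSONResponse:
        return JSONResponse(status_code=status_code, content={"errors": [str(exc)]})

    return _handler


app = FastAPI()
# domain errors raised by the repositories bubble up and are translated here,
# so route handlers such as get_computation no longer need try/except blocks
app.add_exception_handler(
    ProjectNotFoundError,
    make_http_error_handler_for_exception(status.HTTP_404_NOT_FOUND, ProjectNotFoundError),
)
app.add_exception_handler(
    PipelineNotFoundError,
    make_http_error_handler_for_exception(status.HTTP_404_NOT_FOUND, PipelineNotFoundError),
)
```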
.../class/osparc/desktop/StartStopButtons.js | 24 +++++++++++++------ 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/services/web/client/source/class/osparc/desktop/StartStopButtons.js b/services/web/client/source/class/osparc/desktop/StartStopButtons.js index 7e13b8221ba..34cd91cf930 100644 --- a/services/web/client/source/class/osparc/desktop/StartStopButtons.js +++ b/services/web/client/source/class/osparc/desktop/StartStopButtons.js @@ -208,13 +208,23 @@ qx.Class.define("osparc.desktop.StartStopButtons", { if (study) { const startButtons = this.__getStartButtons(); const stopButton = this.__stopButton; - const isPipelineRunning = study.isPipelineRunning(); - if (isPipelineRunning) { - startButtons.forEach(startButton => startButton.setFetching(true)); - stopButton.setEnabled(true); - } else { - startButtons.forEach(startButton => startButton.setFetching(false)); - stopButton.setEnabled(false); + const pipelineState = study.getPipelineState(); + if (pipelineState) { + switch (pipelineState) { + case "PENDING": + case "PUBLISHED": + case "STARTED": + startButtons.forEach(startButton => startButton.setFetching(true)); + stopButton.setEnabled(true); + break; + case "NOT_STARTED": + case "SUCCESS": + case "FAILED": + default: + startButtons.forEach(startButton => startButton.setFetching(false)); + stopButton.setEnabled(false); + break; + } } } } From 8d47f0f779aa3100325a4964cecb8c2ee4ee53db Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 7 Feb 2022 14:44:22 +0100 Subject: [PATCH 12/16] show message coming from backend when 409 --- .../web/client/source/class/osparc/desktop/StudyEditor.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/services/web/client/source/class/osparc/desktop/StudyEditor.js b/services/web/client/source/class/osparc/desktop/StudyEditor.js index 8513048c71b..7f500d1aa43 100644 --- a/services/web/client/source/class/osparc/desktop/StudyEditor.js +++ b/services/web/client/source/class/osparc/desktop/StudyEditor.js @@ -562,8 +562,12 @@ qx.Class.define("osparc.desktop.StudyEditor", { this.__lastSavedStudy = osparc.wrapper.JsonDiffPatch.getInstance().clone(newObj); }) .catch(error => { - console.error(error); - osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Error saving the study"), "ERROR"); + if ("status" in error && error.status === 409) { + osparc.component.message.FlashMessenger.getInstance().logAs(error.message, "ERROR"); + } else { + console.error(error); + osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Error saving the study"), "ERROR"); + } this.__getStudyLogger().error(null, "Error updating pipeline"); // Need to throw the error to be able to handle it later throw error; From 8d052e19241b2797267cf56daa069c9ce4ca3ad5 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 7 Feb 2022 14:45:37 +0100 Subject: [PATCH 13/16] Revert "Merge branch 'enh/avoid-saving-while-running' of https://github.com/pcrespov/osparc-simcore into feature/read-only-link-while-running" This reverts commit 22ddb5cc0547fd9ca1a3a985d0671929199c0928, reversing changes made to 27aa3ec96e9a5ec45951f95e7e077de9fd5b4343. 
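Note: the revert below backs out, among other things, the RunningState helpers introduced in patch 05. For reference, here is a minimal sketch of that helper pattern on the str-based enum; only members named somewhere in these diffs are listed, and the real enum defines more.

```python
# Reference sketch of the RunningState helpers removed by the revert below.
from enum import Enum, unique


@unique
class RunningState(str, Enum):
    PUBLISHED = "PUBLISHED"
    PENDING = "PENDING"
    STARTED = "STARTED"
    RETRY = "RETRY"
    NOT_STARTED = "NOT_STARTED"
    SUCCESS = "SUCCESS"
    FAILED = "FAILED"
    ABORTED = "ABORTED"

    def is_running(self) -> bool:
        # the four states the scheduler treats as "pipeline still busy"
        return self in (
            RunningState.PUBLISHED,
            RunningState.PENDING,
            RunningState.STARTED,
            RunningState.RETRY,
        )

    def is_stopped(self) -> bool:
        return not self.is_running()


# usage, as in director-v2's utils/computations.py before the revert:
assert RunningState.STARTED.is_running()
assert RunningState.SUCCESS.is_stopped()
```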
--- .../src/models_library/projects_state.py | 11 -- .../api/routes/computations.py | 102 +++++++++--------- .../core/application.py | 16 --- .../models/schemas/comp_tasks.py | 2 +- .../utils/computations.py | 9 +- .../integration/01/test_computation_api.py | 22 ++-- ...t_dynamic_sidecar_nodeports_integration.py | 4 +- .../tests/integration/shared_comp_utils.py | 10 +- .../director_v2_api.py | 2 - .../director_v2_core.py | 15 --- .../projects/projects_handlers.py | 25 ----- 11 files changed, 77 insertions(+), 141 deletions(-) diff --git a/packages/models-library/src/models_library/projects_state.py b/packages/models-library/src/models_library/projects_state.py index 867d9a7fc41..b3aecce93cb 100644 --- a/packages/models-library/src/models_library/projects_state.py +++ b/packages/models-library/src/models_library/projects_state.py @@ -27,17 +27,6 @@ class RunningState(str, Enum): FAILED = "FAILED" ABORTED = "ABORTED" - def is_running(self) -> bool: - return self in ( - RunningState.PUBLISHED, - RunningState.PENDING, - RunningState.STARTED, - RunningState.RETRY, - ) - - def is_stopped(self) -> bool: - return not self.is_running() - @unique class DataState(str, Enum): diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py index 69e89bf61cd..5c6d505a12b 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py @@ -15,13 +15,13 @@ from tenacity.stop import stop_after_delay from tenacity.wait import wait_random -from ...core.errors import ProjectNotFoundError, SchedulerError +from ...core.errors import PipelineNotFoundError, ProjectNotFoundError, SchedulerError from ...models.domains.comp_pipelines import CompPipelineAtDB from ...models.domains.comp_tasks import CompTaskAtDB from ...models.schemas.comp_tasks import ( ComputationTaskCreate, ComputationTaskDelete, - ComputationTaskGet, + ComputationTaskOut, ComputationTaskStop, ) from ...models.schemas.constants import UserID @@ -55,7 +55,7 @@ @router.post( "", summary="Create and optionally start a new computation", - response_model=ComputationTaskGet, + response_model=ComputationTaskOut, status_code=status.HTTP_201_CREATED, ) # NOTE: in case of a burst of calls to that endpoint, we might end up in a weird state. 
@@ -72,7 +72,7 @@ async def create_computation( ), director_client: DirectorV0Client = Depends(get_director_v0_client), scheduler: BaseCompScheduler = Depends(get_scheduler), -) -> ComputationTaskGet: +) -> ComputationTaskOut: log.debug( "User %s is creating a new computation from project %s", job.user_id, @@ -149,7 +149,7 @@ async def create_computation( ] pipeline_state = get_pipeline_state_from_task_states(filtered_tasks) - return ComputationTaskGet( + return ComputationTaskOut( id=job.project_id, state=pipeline_state, pipeline_details=await compute_pipeline_details( @@ -168,7 +168,7 @@ async def create_computation( @router.get( "/{project_id}", summary="Returns a computation pipeline state", - response_model=ComputationTaskGet, + response_model=ComputationTaskOut, status_code=status.HTTP_202_ACCEPTED, ) async def get_computation( @@ -182,60 +182,60 @@ async def get_computation( computation_tasks: CompTasksRepository = Depends( get_repository(CompTasksRepository) ), -) -> ComputationTaskGet: - log.debug( - "User %s getting computation status for project %s", - f"{user_id=}", - f"{project_id=}", - ) +) -> ComputationTaskOut: + log.debug("User %s getting computation status for project %s", user_id, project_id) + try: + # check that project actually exists + await project_repo.get_project(project_id) - # check that project actually exists - await project_repo.get_project(project_id) + # NOTE: Here it is assumed the project exists in comp_tasks/comp_pipeline + # get the project pipeline + pipeline_at_db: CompPipelineAtDB = await computation_pipelines.get_pipeline( + project_id + ) + pipeline_dag: nx.DiGraph = pipeline_at_db.get_graph() - # NOTE: Here it is assumed the project exists in comp_tasks/comp_pipeline - # get the project pipeline - pipeline_at_db: CompPipelineAtDB = await computation_pipelines.get_pipeline( - project_id - ) - pipeline_dag: nx.DiGraph = pipeline_at_db.get_graph() + # get the project task states + all_tasks: List[CompTaskAtDB] = await computation_tasks.get_all_tasks( + project_id + ) + # create the complete DAG graph + complete_dag = create_complete_dag_from_tasks(all_tasks) - # get the project task states - all_tasks: List[CompTaskAtDB] = await computation_tasks.get_all_tasks(project_id) + # filter the tasks by the effective pipeline + filtered_tasks = [ + t for t in all_tasks if str(t.node_id) in list(pipeline_dag.nodes()) + ] + pipeline_state = get_pipeline_state_from_task_states(filtered_tasks) - # filter the tasks by the effective pipeline - filtered_tasks = [ - t for t in all_tasks if str(t.node_id) in list(pipeline_dag.nodes()) - ] - pipeline_state = get_pipeline_state_from_task_states(filtered_tasks) + log.debug( + "Computational task status by user %s for project %s is %s", + user_id, + project_id, + pipeline_state, + ) - log.debug( - "Computational task status by %s for %s has %s", - f"{user_id=}", - f"{project_id=}", - f"{pipeline_state=}", - ) + task_out = ComputationTaskOut( + id=project_id, + state=pipeline_state, + pipeline_details=await compute_pipeline_details( + complete_dag, pipeline_dag, all_tasks + ), + url=f"{request.url.remove_query_params('user_id')}", + stop_url=f"{request.url.remove_query_params('user_id')}:stop" + if is_pipeline_running(pipeline_state) + else None, + ) + return task_out - # create the complete DAG graph - complete_dag = create_complete_dag_from_tasks(all_tasks) - pipeline_details = await compute_pipeline_details( - complete_dag, pipeline_dag, all_tasks - ) - self_url = f"{request.url.remove_query_params('user_id')}" - - 
task_out = ComputationTaskGet( - id=project_id, - state=pipeline_state, - pipeline_details=pipeline_details, - url=self_url, - stop_url=f"{self_url}:stop" if pipeline_state.is_running() else None, - ) - return task_out + except (ProjectNotFoundError, PipelineNotFoundError) as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e @router.post( "/{project_id}:stop", summary="Stops a computation pipeline", - response_model=ComputationTaskGet, + response_model=ComputationTaskOut, status_code=status.HTTP_202_ACCEPTED, ) async def stop_computation_project( @@ -250,7 +250,7 @@ async def stop_computation_project( get_repository(CompTasksRepository) ), scheduler: BaseCompScheduler = Depends(get_scheduler), -) -> ComputationTaskGet: +) -> ComputationTaskOut: log.debug( "User %s stopping computation for project %s", comp_task_stop.user_id, @@ -277,7 +277,7 @@ async def stop_computation_project( if is_pipeline_running(pipeline_state): await scheduler.stop_pipeline(comp_task_stop.user_id, project_id) - return ComputationTaskGet( + return ComputationTaskOut( id=project_id, state=pipeline_state, pipeline_details=await compute_pipeline_details( diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py index f1f46cbe98b..b2982cf74c8 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/application.py +++ b/services/director-v2/src/simcore_service_director_v2/core/application.py @@ -26,7 +26,6 @@ remote_debug, ) from ..utils.logging_utils import config_all_loggers -from .errors import PipelineNotFoundError, ProjectNotFoundError from .events import on_shutdown, on_startup from .settings import AppSettings, BootModeEnum @@ -87,23 +86,8 @@ def init_app(settings: Optional[AppSettings] = None) -> FastAPI: # setup app -- app.add_event_handler("startup", on_startup) app.add_event_handler("shutdown", on_shutdown) - app.add_exception_handler(HTTPException, http_error_handler) app.add_exception_handler(RequestValidationError, http422_error_handler) - # director-v2 core.errors mappend into HTTP errors - app.add_exception_handler( - ProjectNotFoundError, - make_http_error_handler_for_exception( - status.HTTP_404_NOT_FOUND, ProjectNotFoundError - ), - ) - app.add_exception_handler( - PipelineNotFoundError, - make_http_error_handler_for_exception( - status.HTTP_404_NOT_FOUND, PipelineNotFoundError - ), - ) - # SEE https://docs.python.org/3/library/exceptions.html#exception-hierarchy app.add_exception_handler( NotImplementedError, diff --git a/services/director-v2/src/simcore_service_director_v2/models/schemas/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/schemas/comp_tasks.py index 91cf81eff93..0abfe3909ec 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/schemas/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/schemas/comp_tasks.py @@ -9,7 +9,7 @@ from ..schemas.constants import UserID -class ComputationTaskGet(ComputationTask): +class ComputationTaskOut(ComputationTask): url: AnyHttpUrl = Field( ..., description="the link where to get the status of the task" ) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/computations.py b/services/director-v2/src/simcore_service_director_v2/utils/computations.py index 8281bf9eb4b..7a0f427386f 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/computations.py +++ 
b/services/director-v2/src/simcore_service_director_v2/utils/computations.py @@ -88,8 +88,13 @@ def to_node_class(service_key: str) -> NodeClass: def is_pipeline_running(pipeline_state: RunningState) -> bool: - return pipeline_state.is_running() + return pipeline_state in [ + RunningState.PUBLISHED, + RunningState.PENDING, + RunningState.STARTED, + RunningState.RETRY, + ] def is_pipeline_stopped(pipeline_state: RunningState) -> bool: - return pipeline_state.is_stopped() + return not is_pipeline_running(pipeline_state) diff --git a/services/director-v2/tests/integration/01/test_computation_api.py b/services/director-v2/tests/integration/01/test_computation_api.py index cc133adcb1c..bef42d5a9d4 100644 --- a/services/director-v2/tests/integration/01/test_computation_api.py +++ b/services/director-v2/tests/integration/01/test_computation_api.py @@ -33,7 +33,7 @@ create_pipeline, ) from simcore_sdk.node_ports_common import config as node_ports_config -from simcore_service_director_v2.models.schemas.comp_tasks import ComputationTaskGet +from simcore_service_director_v2.models.schemas.comp_tasks import ComputationTaskOut from starlette import status from starlette.testclient import TestClient from yarl import URL @@ -393,7 +393,7 @@ def _convert_to_pipeline_details( if index in params.subgraph_elements ], ) - task_out = ComputationTaskGet.parse_obj(response.json()) + task_out = ComputationTaskOut.parse_obj(response.json()) # check the contents is correctb await assert_computation_task_out_obj( async_client, @@ -455,7 +455,7 @@ def _convert_to_pipeline_details( ], force_restart=True, ) - task_out = ComputationTaskGet.parse_obj(response.json()) + task_out = ComputationTaskOut.parse_obj(response.json()) await assert_computation_task_out_obj( async_client, @@ -490,7 +490,7 @@ async def test_run_computation( start_pipeline=True, expected_response_status_code=status.HTTP_201_CREATED, ) - task_out = ComputationTaskGet.parse_obj(response.json()) + task_out = ComputationTaskOut.parse_obj(response.json()) # check the contents is correct: a pipeline that just started gets PUBLISHED await assert_computation_task_out_obj( @@ -553,7 +553,7 @@ async def test_run_computation( expected_response_status_code=status.HTTP_201_CREATED, force_restart=True, ) - task_out = ComputationTaskGet.parse_obj(response.json()) + task_out = ComputationTaskOut.parse_obj(response.json()) # check the contents is correct await assert_computation_task_out_obj( async_client, @@ -600,7 +600,7 @@ async def test_abort_computation( start_pipeline=True, expected_response_status_code=status.HTTP_201_CREATED, ) - task_out = ComputationTaskGet.parse_obj(response.json()) + task_out = ComputationTaskOut.parse_obj(response.json()) # check the contents is correctb await assert_computation_task_out_obj( @@ -640,7 +640,7 @@ async def test_abort_computation( assert ( response.status_code == status.HTTP_202_ACCEPTED ), f"response code is {response.status_code}, error: {response.text}" - task_out = ComputationTaskGet.parse_obj(response.json()) + task_out = ComputationTaskOut.parse_obj(response.json()) assert ( str(task_out.url) == f"{async_client.base_url}/v2/computations/{sleepers_project.uuid}" @@ -678,7 +678,7 @@ async def test_update_and_delete_computation( start_pipeline=False, expected_response_status_code=status.HTTP_201_CREATED, ) - task_out = ComputationTaskGet.parse_obj(response.json()) + task_out = ComputationTaskOut.parse_obj(response.json()) # check the contents is correctb await assert_computation_task_out_obj( @@ -697,7 +697,7 @@ async 
def test_update_and_delete_computation( start_pipeline=False, expected_response_status_code=status.HTTP_201_CREATED, ) - task_out = ComputationTaskGet.parse_obj(response.json()) + task_out = ComputationTaskOut.parse_obj(response.json()) # check the contents is correctb await assert_computation_task_out_obj( @@ -716,7 +716,7 @@ async def test_update_and_delete_computation( start_pipeline=False, expected_response_status_code=status.HTTP_201_CREATED, ) - task_out = ComputationTaskGet.parse_obj(response.json()) + task_out = ComputationTaskOut.parse_obj(response.json()) # check the contents is correctb await assert_computation_task_out_obj( @@ -735,7 +735,7 @@ async def test_update_and_delete_computation( start_pipeline=True, expected_response_status_code=status.HTTP_201_CREATED, ) - task_out = ComputationTaskGet.parse_obj(response.json()) + task_out = ComputationTaskOut.parse_obj(response.json()) # check the contents is correctb await assert_computation_task_out_obj( async_client, diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py index 465f2899a4e..9ac34fe9dd7 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py +++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py @@ -56,7 +56,7 @@ from simcore_sdk.node_ports_common import config as node_ports_config from simcore_sdk.node_ports_v2 import DBManager, Nodeports, Port from simcore_service_director_v2.core.settings import AppSettings, RCloneSettings -from simcore_service_director_v2.models.schemas.comp_tasks import ComputationTaskGet +from simcore_service_director_v2.models.schemas.comp_tasks import ComputationTaskOut from simcore_service_director_v2.models.schemas.constants import ( DYNAMIC_SIDECAR_SERVICE_PREFIX, ) @@ -853,7 +853,7 @@ async def test_nodeports_integration( start_pipeline=True, expected_response_status_code=status.HTTP_201_CREATED, ) - task_out = ComputationTaskGet.parse_obj(response.json()) + task_out = ComputationTaskOut.parse_obj(response.json()) # check the contents is correct: a pipeline that just started gets PUBLISHED await assert_computation_task_out_obj( diff --git a/services/director-v2/tests/integration/shared_comp_utils.py b/services/director-v2/tests/integration/shared_comp_utils.py index c5b44be06de..cc06b67f01c 100644 --- a/services/director-v2/tests/integration/shared_comp_utils.py +++ b/services/director-v2/tests/integration/shared_comp_utils.py @@ -10,7 +10,7 @@ from pydantic.networks import AnyHttpUrl from pydantic.types import PositiveInt from pytest_simcore.helpers.constants import MINUTE -from simcore_service_director_v2.models.schemas.comp_tasks import ComputationTaskGet +from simcore_service_director_v2.models.schemas.comp_tasks import ComputationTaskOut from starlette import status from tenacity._asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type @@ -46,7 +46,7 @@ async def create_pipeline( async def assert_computation_task_out_obj( client: httpx.AsyncClient, - task_out: ComputationTaskGet, + task_out: ComputationTaskOut, *, project: ProjectAtDB, exp_task_state: RunningState, @@ -70,7 +70,7 @@ async def assert_and_wait_for_pipeline_status( user_id: PositiveInt, project_uuid: UUID, wait_for_states: List[RunningState] = None, -) -> ComputationTaskGet: +) -> ComputationTaskOut: if not wait_for_states: wait_for_states = [ RunningState.SUCCESS, @@ -79,12 
+79,12 @@ async def assert_and_wait_for_pipeline_status( ] MAX_TIMEOUT_S = 5 * MINUTE - async def check_pipeline_state() -> ComputationTaskGet: + async def check_pipeline_state() -> ComputationTaskOut: response = await client.get(url, params={"user_id": user_id}) assert ( response.status_code == status.HTTP_202_ACCEPTED ), f"response code is {response.status_code}, error: {response.text}" - task_out = ComputationTaskGet.parse_obj(response.json()) + task_out = ComputationTaskOut.parse_obj(response.json()) assert task_out.id == project_uuid assert task_out.url == f"{client.base_url}/v2/computations/{project_uuid}" print( diff --git a/services/web/server/src/simcore_service_webserver/director_v2_api.py b/services/web/server/src/simcore_service_webserver/director_v2_api.py index b3a5806da14..92918d15500 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2_api.py +++ b/services/web/server/src/simcore_service_webserver/director_v2_api.py @@ -18,7 +18,6 @@ get_service_state, get_services, is_healthy, - is_pipeline_running, request_retrieve_dyn_service, restart, retrieve, @@ -38,7 +37,6 @@ "get_service_state", "get_services", "is_healthy", - "is_pipeline_running", "request_retrieve_dyn_service", "restart", "retrieve", diff --git a/services/web/server/src/simcore_service_webserver/director_v2_core.py b/services/web/server/src/simcore_service_webserver/director_v2_core.py index 9af6dede284..d9e3dae8a2f 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2_core.py +++ b/services/web/server/src/simcore_service_webserver/director_v2_core.py @@ -211,21 +211,6 @@ async def create_or_update_pipeline( log.error("could not create pipeline from project %s: %s", project_id, exc) -@log_decorator(logger=log) -async def is_pipeline_running( - app: web.Application, user_id: PositiveInt, project_id: UUID -) -> bool: - - # TODO: make it cheaper by /computations/{project_id}/state - pipeline = await get_computation_task(app, user_id, project_id) - if pipeline is None: - # the pipeline might not exist and that is ok - # FIXME: or some error happened so ... we assume is not running?? - return False # not running - - return pipeline.state.is_running() - - @log_decorator(logger=log) async def get_computation_task( app: web.Application, user_id: PositiveInt, project_id: UUID diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py index 013265bbb5d..413c0bc71d0 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py @@ -406,31 +406,6 @@ async def replace_project(request: web.Request): if current_project["accessRights"] != new_project["accessRights"]: await check_permission(request, "project.access_rights.update") - if await director_v2_api.is_pipeline_running( - request.app, user_id, project_uuid - ): - # NOTE: This is a conservative measure that we take - # until nodeports logic is re-designed to tackle with this - # particular state. - # - # This measure avoid having a state with different node *links* in the - # comp-tasks table and the project's workbench column. - # The limitation is that nodeports only "sees" those in the comptask - # and this table does not add the new ones since it remains "blocked" - # for modification from that project while the pipeline runs. 
Therefore - # any extra link created while the pipeline is running can not - # be managed by nodeports because it basically "cannot see it" - # - # Responds https://httpstatuses.com/409: - # The request could not be completed due to a conflict with the current - # state of the target resource (i.e. pipeline is running). This code is used in - # situations where the user might be able to resolve the conflict - # and resubmit the request (front-end will show a pop-up with message below) - # - raise web.HTTPConflict( - reason="Project cannot be modified while pipeline is still running." - ) - new_project = await db.replace_user_project( new_project, user_id, f"{project_uuid}", include_templates=True ) From bc578fbec3492283292b1d738326c818499d2c0b Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Mon, 7 Feb 2022 16:01:22 +0100 Subject: [PATCH 14/16] refactoring --- .../class/osparc/desktop/StartStopButtons.js | 27 +++++-------------- 1 file changed, 6 insertions(+), 21 deletions(-) diff --git a/services/web/client/source/class/osparc/desktop/StartStopButtons.js b/services/web/client/source/class/osparc/desktop/StartStopButtons.js index 34cd91cf930..d5436ccd47c 100644 --- a/services/web/client/source/class/osparc/desktop/StartStopButtons.js +++ b/services/web/client/source/class/osparc/desktop/StartStopButtons.js @@ -197,7 +197,11 @@ qx.Class.define("osparc.desktop.StartStopButtons", { __checkButtonsVisible: function() { const allNodes = this.getStudy().getWorkbench().getNodes(true); const isRunnable = Object.values(allNodes).some(node => (node.isComputational() || node.isIterator())); - this.__getStartButtons().forEach(startBtn => startBtn.setEnabled(isRunnable)); + this.__getStartButtons().forEach(startBtn => { + if (!startBtn.isFetching()) { + startBtn.setEnabled(isRunnable); + } + }, this); const isReadOnly = this.getStudy().isReadOnly(); this.setVisibility(isReadOnly ? 
"excluded" : "visible"); @@ -206,26 +210,7 @@ qx.Class.define("osparc.desktop.StartStopButtons", { __updateRunButtonsStatus: function() { const study = this.getStudy(); if (study) { - const startButtons = this.__getStartButtons(); - const stopButton = this.__stopButton; - const pipelineState = study.getPipelineState(); - if (pipelineState) { - switch (pipelineState) { - case "PENDING": - case "PUBLISHED": - case "STARTED": - startButtons.forEach(startButton => startButton.setFetching(true)); - stopButton.setEnabled(true); - break; - case "NOT_STARTED": - case "SUCCESS": - case "FAILED": - default: - startButtons.forEach(startButton => startButton.setFetching(false)); - stopButton.setEnabled(false); - break; - } - } + this.setRunning(study.isPipelineRunning()); } } } From 7b52aca87047e2d836437423009fa9f5af8fabd3 Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Tue, 8 Feb 2022 08:50:34 +0100 Subject: [PATCH 15/16] e2e fix: 2D_plot --- tests/e2e/portal/2D_Plot.js | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/e2e/portal/2D_Plot.js b/tests/e2e/portal/2D_Plot.js index 94a3436c07c..71d992ff01d 100644 --- a/tests/e2e/portal/2D_Plot.js +++ b/tests/e2e/portal/2D_Plot.js @@ -29,8 +29,6 @@ async function runTutorial () { await tutorial.waitFor(5000, 'Some time for starting the service'); await utils.takeScreenshot(page, screenshotPrefix + 'service_started'); - await tutorial.openNode(1); - await tutorial.waitFor(2000); await utils.takeScreenshot(page, screenshotPrefix + 'iFrame0'); From 5b78298671c54be9a56c54166f1551bd0aa25aae Mon Sep 17 00:00:00 2001 From: odeimaiz Date: Tue, 8 Feb 2022 09:20:36 +0100 Subject: [PATCH 16/16] revert --- services/web/client/source/class/osparc/data/model/Study.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/web/client/source/class/osparc/data/model/Study.js b/services/web/client/source/class/osparc/data/model/Study.js index 3b00e5c1558..60754e7401d 100644 --- a/services/web/client/source/class/osparc/data/model/Study.js +++ b/services/web/client/source/class/osparc/data/model/Study.js @@ -57,7 +57,7 @@ qx.Class.define("osparc.data.model.Study", { quality: studyData.quality || this.getQuality() }); - const wbData = studyData.workbench || {}; + const wbData = studyData.workbench || this.getWorkbench(); const workbench = new osparc.data.model.Workbench(wbData, studyData.ui); this.setWorkbench(workbench); workbench.setStudy(this); @@ -126,7 +126,7 @@ qx.Class.define("osparc.data.model.Study", { workbench: { check: "osparc.data.model.Workbench", nullable: false, - init: null + init: {} }, ui: {