From 1c68b65b19c5918b41a8b0f7ebaec4f7badf85d1 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 8 Apr 2022 11:05:23 +0200 Subject: [PATCH 01/19] WIP: split project copy [skip CI] --- .../projects/_copy.py | 110 ++++++++++++++++++ .../projects/projects_api.py | 24 ---- 2 files changed, 110 insertions(+), 24 deletions(-) create mode 100644 services/web/server/src/simcore_service_webserver/projects/_copy.py diff --git a/services/web/server/src/simcore_service_webserver/projects/_copy.py b/services/web/server/src/simcore_service_webserver/projects/_copy.py new file mode 100644 index 00000000000..c1140554f81 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_copy.py @@ -0,0 +1,110 @@ +""" Implementation of project copy operations + + +""" + +import logging +from copy import deepcopy +from typing import Dict, List, Optional, Tuple, Union +from uuid import UUID, uuid1, uuid5 + +from aiohttp import web + +from ..storage_api import copy_data_folders_from_project + +log = logging.getLogger(__name__) + + +def clone_project_document( + project: Dict, + *, + forced_copy_project_id: Optional[UUID] = None, + clean_output_data: bool = False, +) -> Tuple[Dict, Dict]: + project_copy = deepcopy(project) + + # Update project id + # NOTE: this can be re-assigned by dbapi if not unique + if forced_copy_project_id: + assert isinstance(forced_copy_project_id, UUID) # nosec + project_copy_uuid = forced_copy_project_id + else: + project_copy_uuid = uuid1() # random project id + + assert project_copy_uuid # nosec + + project_copy["uuid"] = str(project_copy_uuid) + + # Workbench nodes shall be unique within the project context + def _create_new_node_uuid(old_uuid): + return str(uuid5(project_copy_uuid, str(old_uuid))) + + nodes_map = {} + for node_uuid in project.get("workbench", {}).keys(): + nodes_map[node_uuid] = _create_new_node_uuid(node_uuid) + + project_map = {project["uuid"]: project_copy["uuid"]} + + def _replace_uuids(node: Union[str, List, Dict]) -> Union[str, List, Dict]: + if isinstance(node, str): + # NOTE: for datasets we get something like project_uuid/node_uuid/file_id + if "/" in node: + parts = node.split("/") + node = "/".join(_replace_uuids(part) for part in parts) + else: + node = project_map.get(node, nodes_map.get(node, node)) + elif isinstance(node, list): + node = [_replace_uuids(item) for item in node] + elif isinstance(node, dict): + _frozen_items = tuple(node.items()) + for key, value in _frozen_items: + if key in nodes_map: + new_key = nodes_map[key] + node[new_key] = node.pop(key) + key = new_key + + node[key] = _replace_uuids(value) + return node + + project_copy["workbench"] = _replace_uuids(project_copy.get("workbench", {})) + if "ui" in project_copy: + project_copy["ui"]["workbench"] = _replace_uuids( + project_copy["ui"].get("workbench", {}) + ) + project_copy["ui"]["slideshow"] = _replace_uuids( + project_copy["ui"].get("slideshow", {}) + ) + if "mode" in project_copy["ui"]: + project_copy["ui"]["mode"] = project_copy["ui"]["mode"] + if clean_output_data: + FIELDS_TO_DELETE = ("outputs", "progress", "runHash") + for node_data in project_copy.get("workbench", {}).values(): + for field in FIELDS_TO_DELETE: + node_data.pop(field, None) + return project_copy, nodes_map + + +async def clone_project( + app: web.Application, project: Dict, user_id: int, forced_copy_project_id: str = "" +) -> Dict: + """Clones both document and data folders of a project + + - document + - get new identifiers for project and 
nodes + - data folders + - folder name composes as project_uuid/node_uuid + - data is deep-copied to new folder corresponding to new identifiers + - managed by storage uservice + """ + + # + # NOTE: Needs refactoring after access-layer in storage. DO NOT USE but keep + # here since it documents well the concept + + cloned_project, nodes_map = clone_project_document(project, forced_copy_project_id) + + updated_project = await copy_data_folders_from_project( + app, project, cloned_project, nodes_map, user_id + ) + + return updated_project diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index 8b0408389ef..3aadf7e2e74 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -110,30 +110,6 @@ async def get_project_for_user( return project -# NOTE: Needs refactoring after access-layer in storage. DO NOT USE but keep -# here since it documents well the concept -# -# async def clone_project( -# request: web.Request, project: Dict, user_id: int, forced_copy_project_id: str = "" -# ) -> Dict: -# """Clones both document and data folders of a project -# -# - document -# - get new identifiers for project and nodes -# - data folders -# - folder name composes as project_uuid/node_uuid -# - data is deep-copied to new folder corresponding to new identifiers -# - managed by storage uservice -# """ -# cloned_project, nodes_map = clone_project_document(project, forced_copy_project_id) -# -# updated_project = await copy_data_folders_from_project( -# request.app, project, cloned_project, nodes_map, user_id -# ) -# -# return updated_project - - async def start_project_interactive_services( request: web.Request, project: Dict, user_id: PositiveInt ) -> None: From a818c421973e4886a32bb5c98cefd62564ba8ced Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 8 Apr 2022 16:11:43 +0200 Subject: [PATCH 02/19] new basic type --- .../src/models_library/basic_types.py | 5 ++++- .../models-library/tests/test_basic_types.py | 18 ++++++++++++++++++ .../server/tests/integration/02/test_rabbit.py | 3 ++- 3 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 packages/models-library/tests/test_basic_types.py diff --git a/packages/models-library/src/models_library/basic_types.py b/packages/models-library/src/models_library/basic_types.py index 8705f63a362..bc96d9f582b 100644 --- a/packages/models-library/src/models_library/basic_types.py +++ b/packages/models-library/src/models_library/basic_types.py @@ -2,7 +2,7 @@ from pydantic import conint, constr -from .basic_regex import VERSION_RE +from .basic_regex import UUID_RE, VERSION_RE # port number range PortInt = conint(gt=0, lt=65535) @@ -20,6 +20,9 @@ # env var EnvVarKey = constr(regex=r"[a-zA-Z][a-azA-Z0-9_]*") +# e.g. 
+UUIDStr = constr(regex=UUID_RE) + class LogLevel(str, Enum): DEBUG = "DEBUG" diff --git a/packages/models-library/tests/test_basic_types.py b/packages/models-library/tests/test_basic_types.py new file mode 100644 index 00000000000..5f8317eab73 --- /dev/null +++ b/packages/models-library/tests/test_basic_types.py @@ -0,0 +1,18 @@ +import pytest +from faker import Faker +from models_library.basic_types import UUIDStr +from pydantic import ValidationError +from pydantic.tools import parse_obj_as + + +@pytest.mark.skip(reason="DEV: testing parse_obj_as") +def test_parse_uuid_as_a_string(faker: Faker): + + expected_uuid = faker.uuid4() + got_uuid = parse_obj_as(UUIDStr, expected_uuid) + + assert isinstance(got_uuid, str) + assert got_uuid == expected_uuid + + with pytest.raises(ValidationError): + parse_obj_as(UUIDStr, "123456-is-not-an-uuid") diff --git a/services/web/server/tests/integration/02/test_rabbit.py b/services/web/server/tests/integration/02/test_rabbit.py index c2c937f6234..0e0fc6167f4 100644 --- a/services/web/server/tests/integration/02/test_rabbit.py +++ b/services/web/server/tests/integration/02/test_rabbit.py @@ -14,6 +14,7 @@ import socketio import sqlalchemy as sa from faker.proxy import Faker +from models_library.basic_types import UUIDStr from models_library.projects_state import RunningState from models_library.rabbitmq_messages import ( InstrumentationRabbitMessage, @@ -59,7 +60,7 @@ logger = logging.getLogger(__name__) -UUIDStr = str + RabbitMessage = Dict[str, Any] LogMessages = List[LoggerRabbitMessage] InstrumMessages = List[InstrumentationRabbitMessage] From 96bdcb96ff8db24d8b0a660aab7da3705deb8884 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 8 Apr 2022 17:32:22 +0200 Subject: [PATCH 03/19] rename because copy is a create operation --- .../projects/_copy.py | 110 ------------- .../projects/_create.py | 149 ++++++++++++++++++ 2 files changed, 149 insertions(+), 110 deletions(-) delete mode 100644 services/web/server/src/simcore_service_webserver/projects/_copy.py create mode 100644 services/web/server/src/simcore_service_webserver/projects/_create.py diff --git a/services/web/server/src/simcore_service_webserver/projects/_copy.py b/services/web/server/src/simcore_service_webserver/projects/_copy.py deleted file mode 100644 index c1140554f81..00000000000 --- a/services/web/server/src/simcore_service_webserver/projects/_copy.py +++ /dev/null @@ -1,110 +0,0 @@ -""" Implementation of project copy operations - - -""" - -import logging -from copy import deepcopy -from typing import Dict, List, Optional, Tuple, Union -from uuid import UUID, uuid1, uuid5 - -from aiohttp import web - -from ..storage_api import copy_data_folders_from_project - -log = logging.getLogger(__name__) - - -def clone_project_document( - project: Dict, - *, - forced_copy_project_id: Optional[UUID] = None, - clean_output_data: bool = False, -) -> Tuple[Dict, Dict]: - project_copy = deepcopy(project) - - # Update project id - # NOTE: this can be re-assigned by dbapi if not unique - if forced_copy_project_id: - assert isinstance(forced_copy_project_id, UUID) # nosec - project_copy_uuid = forced_copy_project_id - else: - project_copy_uuid = uuid1() # random project id - - assert project_copy_uuid # nosec - - project_copy["uuid"] = str(project_copy_uuid) - - # Workbench nodes shall be unique within the project context - def _create_new_node_uuid(old_uuid): - return str(uuid5(project_copy_uuid, str(old_uuid))) - - nodes_map = {} - for node_uuid in 
project.get("workbench", {}).keys(): - nodes_map[node_uuid] = _create_new_node_uuid(node_uuid) - - project_map = {project["uuid"]: project_copy["uuid"]} - - def _replace_uuids(node: Union[str, List, Dict]) -> Union[str, List, Dict]: - if isinstance(node, str): - # NOTE: for datasets we get something like project_uuid/node_uuid/file_id - if "/" in node: - parts = node.split("/") - node = "/".join(_replace_uuids(part) for part in parts) - else: - node = project_map.get(node, nodes_map.get(node, node)) - elif isinstance(node, list): - node = [_replace_uuids(item) for item in node] - elif isinstance(node, dict): - _frozen_items = tuple(node.items()) - for key, value in _frozen_items: - if key in nodes_map: - new_key = nodes_map[key] - node[new_key] = node.pop(key) - key = new_key - - node[key] = _replace_uuids(value) - return node - - project_copy["workbench"] = _replace_uuids(project_copy.get("workbench", {})) - if "ui" in project_copy: - project_copy["ui"]["workbench"] = _replace_uuids( - project_copy["ui"].get("workbench", {}) - ) - project_copy["ui"]["slideshow"] = _replace_uuids( - project_copy["ui"].get("slideshow", {}) - ) - if "mode" in project_copy["ui"]: - project_copy["ui"]["mode"] = project_copy["ui"]["mode"] - if clean_output_data: - FIELDS_TO_DELETE = ("outputs", "progress", "runHash") - for node_data in project_copy.get("workbench", {}).values(): - for field in FIELDS_TO_DELETE: - node_data.pop(field, None) - return project_copy, nodes_map - - -async def clone_project( - app: web.Application, project: Dict, user_id: int, forced_copy_project_id: str = "" -) -> Dict: - """Clones both document and data folders of a project - - - document - - get new identifiers for project and nodes - - data folders - - folder name composes as project_uuid/node_uuid - - data is deep-copied to new folder corresponding to new identifiers - - managed by storage uservice - """ - - # - # NOTE: Needs refactoring after access-layer in storage. 
DO NOT USE but keep - # here since it documents well the concept - - cloned_project, nodes_map = clone_project_document(project, forced_copy_project_id) - - updated_project = await copy_data_folders_from_project( - app, project, cloned_project, nodes_map, user_id - ) - - return updated_project diff --git a/services/web/server/src/simcore_service_webserver/projects/_create.py b/services/web/server/src/simcore_service_webserver/projects/_create.py new file mode 100644 index 00000000000..087e783a3f9 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_create.py @@ -0,0 +1,149 @@ +""" Implementations of project creations + +""" + +import logging +from copy import deepcopy +from typing import Any, Dict, Optional, Tuple +from uuid import UUID, uuid1, uuid5 + +from aiohttp import web +from models_library.basic_types import UUIDStr +from models_library.users import UserID + +from ..storage_api import copy_data_folders_from_project +from .project_models import ProjectDict +from .projects_db import APP_PROJECT_DBAPI, ProjectDBAPI + +log = logging.getLogger(__name__) + +NodesMap = Dict[UUIDStr, UUIDStr] + + +def _replace_uuids(entity: Any, project_map, nodes_map) -> Any: + + if isinstance(entity, str): + # NOTE: for datasets we get something like project_uuid/node_uuid/file_id + if "/" in entity: + parts = entity.split("/") + entity = "/".join( + _replace_uuids(part, project_map, nodes_map) for part in parts + ) + else: + entity = project_map.get(entity, nodes_map.get(entity, entity)) + elif isinstance(entity, list): + entity = [_replace_uuids(item, project_map, nodes_map) for item in entity] + + elif isinstance(entity, dict): + _frozen_items = tuple(entity.items()) + for key, value in _frozen_items: + if key in nodes_map: + new_key = nodes_map[key] + entity[new_key] = entity.pop(key) + key = new_key + + entity[key] = _replace_uuids(value, project_map, nodes_map) + return entity + + +def _clone_project_from( + project: ProjectDict, + *, + new_project_id: Optional[UUID], + clean_output_data: bool = False, +) -> Tuple[ProjectDict, NodesMap]: + """ + Deep copies a project dataset and modifies: + - different project ids + - different node ids + - w/ or w/o outputs + """ + # + # TODO: not robust to changes in project schema + # + + project_copy = deepcopy(project) + + # Update project id + # NOTE: this can be re-assigned by dbapi if not unique + if new_project_id: + assert isinstance(new_project_id, UUID) # nosec + project_copy_uuid = new_project_id + else: + project_copy_uuid = uuid1() # random project id + + project_copy["uuid"] = str(project_copy_uuid) + + # Workbench nodes shall be unique within the project context + def _create_new_node_uuid(old_uuid): + return str(uuid5(project_copy_uuid, str(old_uuid))) + + nodes_map = {} + for node_uuid in project.get("workbench", {}).keys(): + nodes_map[node_uuid] = _create_new_node_uuid(node_uuid) + + project_map = {project["uuid"]: project_copy["uuid"]} + + project_copy["workbench"] = _replace_uuids( + project_copy.get("workbench", {}), project_map, nodes_map + ) + if "ui" in project_copy: + project_copy["ui"]["workbench"] = _replace_uuids( + project_copy["ui"].get("workbench", {}), project_map, nodes_map + ) + project_copy["ui"]["slideshow"] = _replace_uuids( + project_copy["ui"].get("slideshow", {}), project_map, nodes_map + ) + if "mode" in project_copy["ui"]: + project_copy["ui"]["mode"] = project_copy["ui"]["mode"] + + if clean_output_data: + FIELDS_TO_DELETE = ("outputs", "progress", "runHash") + for node_data in 
project_copy.get("workbench", {}).values(): + for field in FIELDS_TO_DELETE: + node_data.pop(field, None) + + return project_copy, nodes_map + + +async def copy_project( + app: web.Application, + project: ProjectDict, + user_id: UserID, + new_project_id: Optional[UUID] = None, +) -> ProjectDict: + """Clones both document and data folders of a project + + - document + - get new identifiers for project and nodes + - data folders + - folder name composes as project_uuid/node_uuid + - data is deep-copied to new folder corresponding to new identifiers + - managed by storage uservice + """ + # TODO: set as invisible and set visible when copied so it can be used? + # TODO: atomic operation? + + db: ProjectDBAPI = app[APP_PROJECT_DBAPI] + + # creates clone + project_copy, nodes_map = _clone_project_from( + project, + new_project_id=new_project_id, + clean_output_data=False, + ) + + # inject in db + await db.add_project(project_copy, user_id, force_project_uuid=True) + + # access writes apply again in + updated_project = await copy_data_folders_from_project( + app, project, project_copy, nodes_map, user_id + ) + + assert updated_project == project_copy # nosec + + return updated_project + + +# def create_project( source_project: ProjectID, ) From a749938ff7ba0d9f15ff9b2e280c40082e0164a2 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 8 Apr 2022 17:32:32 +0200 Subject: [PATCH 04/19] doc and parse query --- .../projects/projects_handlers_crud.py | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py index f630e0ee0fd..8c74e660013 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py @@ -11,7 +11,6 @@ and the acronym CRUD states for Create+Read(Get&List)+Update+Delete """ - import asyncio import json import logging @@ -20,10 +19,12 @@ from aiohttp import web from jsonschema import ValidationError +from models_library.basic_types import UUIDStr from models_library.projects import ProjectID from models_library.projects_state import ProjectStatus from models_library.rest_pagination import Page from models_library.rest_pagination_utils import paginate_data +from pydantic import parse_obj_as from servicelib.utils import logged_gather from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB @@ -68,28 +69,31 @@ routes = web.RouteTableDef() - @routes.post(f"/{VTAG}/projects") @login_required @permission_required("project.create") @permission_required("services.pipeline.*") # due to update_pipeline_db async def create_projects( request: web.Request, -): # pylint: disable=too-many-branches, too-many-statements +): + # pylint: disable=too-many-branches, too-many-statements + + # request context params user_id: int = request[RQT_USERID_KEY] db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] - template_uuid = request.query.get("from_template") - as_template = request.query.get("as_template") - copy_data: bool = bool( - request.query.get("copy_data", "true") in [1, "true", "True"] - ) - hidden: bool = bool(request.query.get("hidden", False)) new_project = {} - new_project_was_hidden_before_data_was_copied = hidden try: + # query params + template_uuid: Optional[UUIDStr] = 
request.query.get("from_template") + as_template: Optional[UUIDStr] = request.query.get("as_template") + copy_data: bool = parse_obj_as(bool, request.query.get("copy_data", True)) + hidden: bool = parse_obj_as(bool, request.query.get("hidden", False)) + + new_project_was_hidden_before_data_was_copied = hidden + clone_data_coro: Optional[Coroutine] = None - source_project: Optional[Dict[str, Any]] = None + source_project: Optional[ProjectDict] = None if as_template: # create template from await check_permission(request, "project.template.create") source_project = await projects_api.get_project_for_user( @@ -104,6 +108,7 @@ async def create_projects( raise web.HTTPNotFound( reason="Invalid template uuid {}".format(template_uuid) ) + if source_project: # clone template as user project new_project, nodes_map = clone_project_document( From 0cbcd2df6d0936a32174270dad8672d162df1b14 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Sun, 10 Apr 2022 01:19:46 +0200 Subject: [PATCH 05/19] query validation --- .../projects/projects_handlers.py | 2 +- .../projects/projects_handlers_crud.py | 60 ++++++++++--------- 2 files changed, 34 insertions(+), 28 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py index 988216a5f66..bc26da782e9 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py @@ -25,7 +25,7 @@ log = logging.getLogger(__name__) # -# Singleton per-user resources https://google.aip.dev/156 +# Singleton per-session resources https://google.aip.dev/156 # diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py index 8c74e660013..ad1d70eed0d 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py @@ -18,13 +18,13 @@ from uuid import UUID from aiohttp import web -from jsonschema import ValidationError +from jsonschema import ValidationError as JsonSchemaValidationError from models_library.basic_types import UUIDStr from models_library.projects import ProjectID from models_library.projects_state import ProjectStatus from models_library.rest_pagination import Page from models_library.rest_pagination_utils import paginate_data -from pydantic import parse_obj_as +from pydantic import BaseModel, ValidationError from servicelib.utils import logged_gather from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB @@ -69,44 +69,50 @@ routes = web.RouteTableDef() + +class ProjectCreateQuery(BaseModel): + template_uuid: Optional[UUIDStr] = None + as_template: Optional[UUIDStr] = None + copy_data: bool = True + hidden: bool = False + + @routes.post(f"/{VTAG}/projects") @login_required @permission_required("project.create") @permission_required("services.pipeline.*") # due to update_pipeline_db -async def create_projects( - request: web.Request, -): +async def create_projects(request: web.Request): # pylint: disable=too-many-branches, too-many-statements # request context params user_id: int = request[RQT_USERID_KEY] - db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] + db: ProjectDBAPI = request.app[APP_PROJECT_DBAPI] - 
new_project = {} + # query params try: - # query params - template_uuid: Optional[UUIDStr] = request.query.get("from_template") - as_template: Optional[UUIDStr] = request.query.get("as_template") - copy_data: bool = parse_obj_as(bool, request.query.get("copy_data", True)) - hidden: bool = parse_obj_as(bool, request.query.get("hidden", False)) + q = ProjectCreateQuery.parse_obj(*request.query) + except ValidationError as err: + raise web.HTTPBadRequest(reason=f"Invalid query parameters: {err}") - new_project_was_hidden_before_data_was_copied = hidden + new_project = {} + try: + new_project_was_hidden_before_data_was_copied = q.hidden clone_data_coro: Optional[Coroutine] = None source_project: Optional[ProjectDict] = None - if as_template: # create template from + if q.as_template: # create template from await check_permission(request, "project.template.create") source_project = await projects_api.get_project_for_user( request.app, - project_uuid=as_template, + project_uuid=q.as_template, user_id=user_id, include_templates=False, ) - elif template_uuid: # create from template - source_project = await db.get_template_project(template_uuid) + elif q.template_uuid: # create from template + source_project = await db.get_template_project(q.template_uuid) if not source_project: raise web.HTTPNotFound( - reason="Invalid template uuid {}".format(template_uuid) + reason="Invalid template uuid {}".format(q.template_uuid) ) if source_project: @@ -114,18 +120,18 @@ async def create_projects( new_project, nodes_map = clone_project_document( source_project, forced_copy_project_id=None, - clean_output_data=(copy_data == False), + clean_output_data=(q.copy_data == False), ) - if template_uuid: + if q.template_uuid: # remove template access rights new_project["accessRights"] = {} # the project is to be hidden until the data is copied - hidden = copy_data + q.hidden = q.copy_data clone_data_coro = ( copy_data_folders_from_project( request.app, source_project, new_project, nodes_map, user_id ) - if copy_data + if q.copy_data else None ) # FIXME: parameterized inputs should get defaults provided by service @@ -149,14 +155,14 @@ async def create_projects( new_project = await db.add_project( new_project, user_id, - force_as_template=as_template is not None, - hidden=hidden, + force_as_template=q.as_template is not None, + hidden=q.hidden, ) # copies the project's DATA IF cloned if clone_data_coro: assert source_project # nosec - if as_template: + if q.as_template: # we need to lock the original study while copying the data async with projects_api.lock_with_notification( request.app, @@ -188,11 +194,11 @@ async def create_projects( new_project = await projects_api.add_project_states_for_user( user_id=user_id, project=new_project, - is_template=as_template is not None, + is_template=q.as_template is not None, app=request.app, ) - except ValidationError as exc: + except JsonSchemaValidationError as exc: raise web.HTTPBadRequest(reason="Invalid project data") from exc except ProjectNotFoundError as exc: raise web.HTTPNotFound(reason="Project not found") from exc From 363b4f099ee0421e7447dd9fd462a10f69d7d092 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Sun, 10 Apr 2022 01:47:37 +0200 Subject: [PATCH 06/19] fixes returns in handlers and doc --- .../projects/projects_handlers_crud.py | 39 ++++++++++--------- 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py 
b/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py index ad1d70eed0d..f97393e54f7 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py @@ -1,15 +1,6 @@ """ Handlers for STANDARD methods on /projects colletions -Standard methods are -- Get https://google.aip.dev/131 -- List https://google.aip.dev/132 -- Create https://google.aip.dev/133 -- Update https://google.aip.dev/134 -- Delete https://google.aip.dev/135 - -and the acronym CRUD states for Create+Read(Get&List)+Update+Delete - """ import asyncio import json @@ -25,6 +16,7 @@ from models_library.rest_pagination import Page from models_library.rest_pagination_utils import paginate_data from pydantic import BaseModel, ValidationError +from servicelib.json_serialization import json_dumps from servicelib.utils import logged_gather from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB @@ -69,8 +61,15 @@ routes = web.RouteTableDef() - -class ProjectCreateQuery(BaseModel): +# +# Standard methods or CRUD that states for Create+Read(Get&List)+Update+Delete +# - Get https://google.aip.dev/131 +# - List https://google.aip.dev/132 +# - Create https://google.aip.dev/133 +# - Update https://google.aip.dev/134 +# - Delete https://google.aip.dev/135 +# +class _ProjectCreateQuery(BaseModel): template_uuid: Optional[UUIDStr] = None as_template: Optional[UUIDStr] = None copy_data: bool = True @@ -82,7 +81,8 @@ class ProjectCreateQuery(BaseModel): @permission_required("project.create") @permission_required("services.pipeline.*") # due to update_pipeline_db async def create_projects(request: web.Request): - # pylint: disable=too-many-branches, too-many-statements + # FIXME: too-many-branches + # FIXME: too-many-statements # request context params user_id: int = request[RQT_USERID_KEY] @@ -90,7 +90,7 @@ async def create_projects(request: web.Request): # query params try: - q = ProjectCreateQuery.parse_obj(*request.query) + q = _ProjectCreateQuery.parse_obj(*request.query) except ValidationError as err: raise web.HTTPBadRequest(reason=f"Invalid query parameters: {err}") @@ -238,7 +238,7 @@ async def list_projects(request: web.Request): db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] async def set_all_project_states( - projects: List[Dict[str, Any]], project_types: List[bool] + projects: List[Dict[str, Any]], project_types: List[ProjectTypeDB] ): await logged_gather( *[ @@ -278,7 +278,10 @@ async def set_all_project_states( offset=offset, ) ) - return page.dict(**RESPONSE_MODEL_POLICY) + return web.Response( + text=page.json(**RESPONSE_MODEL_POLICY), + content_type="application/json", + ) @routes.get(f"/{VTAG}/projects/{{project_uuid}}") @@ -325,7 +328,7 @@ async def get_project(request: web.Request): if new_uuid := request.get(RQ_REQUESTED_REPO_PROJECT_UUID_KEY): project["uuid"] = new_uuid - return {"data": project} + return web.json_response({"data": project}, dumps=json_dumps) except ProjectInvalidRightsError as exc: raise web.HTTPForbidden( @@ -440,7 +443,7 @@ async def replace_project(request: web.Request): app=request.app, ) - except ValidationError as exc: + except JsonSchemaValidationError as exc: raise web.HTTPBadRequest( reason=f"Invalid project update: {exc.message}" ) from exc @@ -453,7 +456,7 @@ async def replace_project(request: web.Request): except ProjectNotFoundError as exc: raise web.HTTPNotFound from exc - return {"data": new_project} 
+ return web.json_response({"data": new_project}, dumps=json_dumps) @routes.delete(f"/{VTAG}/projects/{{project_uuid}}") From ef0fc0b930a3398ce87ab1aaffbf8207764ea0e5 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Tue, 19 Apr 2022 20:35:11 +0200 Subject: [PATCH 07/19] cleanup --- packages/models-library/src/models_library/basic_types.py | 2 +- services/web/server/src/simcore_service_webserver/utils.py | 2 +- services/web/server/tests/unit/with_dbs/_helpers.py | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/models-library/src/models_library/basic_types.py b/packages/models-library/src/models_library/basic_types.py index bc96d9f582b..89e8fd68c91 100644 --- a/packages/models-library/src/models_library/basic_types.py +++ b/packages/models-library/src/models_library/basic_types.py @@ -20,7 +20,7 @@ # env var EnvVarKey = constr(regex=r"[a-zA-Z][a-azA-Z0-9_]*") -# e.g. +# e.g. '5c833a78-1af3-43a7-9ed7-6a63b188f4d8' UUIDStr = constr(regex=UUID_RE) diff --git a/services/web/server/src/simcore_service_webserver/utils.py b/services/web/server/src/simcore_service_webserver/utils.py index 22aaf9b0ce8..4b76b514678 100644 --- a/services/web/server/src/simcore_service_webserver/utils.py +++ b/services/web/server/src/simcore_service_webserver/utils.py @@ -145,7 +145,7 @@ def compose_error_msg(msg: str) -> str: def snake_to_camel(subject: str) -> str: parts = subject.lower().split("_") - return parts[0] + "".join(x.title() for x in parts[1:]) + return parts[0] + "".join(word.title() for word in parts[1:]) # ----------------------------------------------- diff --git a/services/web/server/tests/unit/with_dbs/_helpers.py b/services/web/server/tests/unit/with_dbs/_helpers.py index 824695c98a2..615e3ad1021 100644 --- a/services/web/server/tests/unit/with_dbs/_helpers.py +++ b/services/web/server/tests/unit/with_dbs/_helpers.py @@ -34,6 +34,7 @@ class ExpectedResponse(NamedTuple): ] def __str__(self) -> str: + # pylint: disable=protected-access items = ",".join(f"{k}={v.__name__}" for k, v in self._asdict().items()) return f"{self.__class__.__name__}({items})" From ad709a8c26f5ef56ff0ce10a4b03c4f8759189c2 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 20 Apr 2022 10:07:00 +0200 Subject: [PATCH 08/19] WIP --- .../projects/projects_handlers_crud.py | 60 ++++++- .../unit/with_dbs/02/test_projects__create.py | 58 ++++++ .../02/test_projects_handlers_create.py | 166 ++++++++++++++++++ 3 files changed, 283 insertions(+), 1 deletion(-) create mode 100644 services/web/server/tests/unit/with_dbs/02/test_projects__create.py create mode 100644 services/web/server/tests/unit/with_dbs/02/test_projects_handlers_create.py diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py index f97393e54f7..0d620391c47 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py @@ -5,17 +5,20 @@ import asyncio import json import logging +from dataclasses import Field from typing import Any, Coroutine, Dict, List, Optional, Set from uuid import UUID +import orjson from aiohttp import web from jsonschema import ValidationError as JsonSchemaValidationError from models_library.basic_types import UUIDStr from models_library.projects import ProjectID +from 
models_library.projects_access import AccessRights, GroupIDStr from models_library.projects_state import ProjectStatus from models_library.rest_pagination import Page from models_library.rest_pagination_utils import paginate_data -from pydantic import BaseModel, ValidationError +from pydantic import BaseModel, EmailStr, Extra, Field, HttpUrl, ValidationError from servicelib.json_serialization import json_dumps from servicelib.utils import logged_gather from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB @@ -30,6 +33,7 @@ from ..security_decorators import permission_required from ..storage_api import copy_data_folders_from_project from ..users_api import get_user_name +from ..utils import snake_to_camel from . import projects_api from .project_models import ProjectDict, ProjectTypeAPI from .projects_db import APP_PROJECT_DBAPI, ProjectDBAPI @@ -69,6 +73,9 @@ # - Update https://google.aip.dev/134 # - Delete https://google.aip.dev/135 # + + +# API MODELS ------------------------ class _ProjectCreateQuery(BaseModel): template_uuid: Optional[UUIDStr] = None as_template: Optional[UUIDStr] = None @@ -76,11 +83,61 @@ class _ProjectCreateQuery(BaseModel): hidden: bool = False +class ProjectCreate(BaseModel): + """ + -> POST /projects (ProjectCreate) + + - resource ID (i.e. project's uuid) is defined in the *backend* on creation + + """ + + name: str + description: str + thumbnail: Optional[HttpUrl] = None + + # TODO: why these are necessary? + prj_owner: EmailStr = Field(..., description="user's email of owner") + access_rights: Dict[GroupIDStr, AccessRights] = Field(...) + + class Config: + extra = Extra.ignore # error tolerant + alias_generator = snake_to_camel + json_loads = orjson.loads + json_dumps = json_dumps + + +class ProjectGet(BaseModel): + name: str + description: str + thumbnail: HttpUrl = "" + prj_owner: EmailStr = Field(..., description="user's email of owner") + access_rights: Dict[GroupIDStr, AccessRights] = Field(...) + + class Config: + extra = Extra.allow + alias_generator = snake_to_camel + json_dumps = json_dumps + + +# HANDLERS ------------------------ + + @routes.post(f"/{VTAG}/projects") @login_required @permission_required("project.create") @permission_required("services.pipeline.*") # due to update_pipeline_db async def create_projects(request: web.Request): + """ + + :raises web.HTTPBadRequest + :raises web.HTTPNotFound + :raises web.HTTPBadRequest + :raises web.HTTPNotFound + :raises web.HTTPUnauthorized + :raises web.HTTPCreated + """ + # SEE https://google.aip.dev/133 + # FIXME: too-many-branches # FIXME: too-many-statements @@ -92,6 +149,7 @@ async def create_projects(request: web.Request): try: q = _ProjectCreateQuery.parse_obj(*request.query) except ValidationError as err: + # TODO: impove error. which parameter failed and why? 
raise web.HTTPBadRequest(reason=f"Invalid query parameters: {err}") new_project = {} diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects__create.py b/services/web/server/tests/unit/with_dbs/02/test_projects__create.py new file mode 100644 index 00000000000..eb644e8d6b3 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/02/test_projects__create.py @@ -0,0 +1,58 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +from uuid import UUID + +import pytest +from models_library.projects import Workbench +from pydantic import BaseModel +from simcore_service_webserver.projects import _create + +# HELPERS ----------------------------------------------------------------------------------------- + + +class ProjectSelectDB(BaseModel): + workbench: Workbench + + +class ProjectRepo: + def get(self, uuid): + pass + + +# TESTS ----------------------------------------------------------------------------------------- + + +@pytest.mark.skip(reason="UNDER DEV") +def test_clone_project(project_uuid: UUID): + # a project in db + repo = ProjectRepo() + + # fetch from db -> model + project: ProjectSelectDB = repo.get(uuid=project_uuid) + + # user copy & update + + # update all references to NodeID using a nodes_map + new_workbench: Workbench = project.workbench.copy(exclude_unset=True) + + # make a copy & update + project_clone = project.copy( + update={"workbench": new_workbench}, exclude_unset=True + ) + + # convert cloned ProjectSelectDB -> ProjectInsertDB + # retrieve ProjectSelectDB -> ProjectGet + # + + +@pytest.mark.skip(reason="UNDER DEV") +def test_long_running_copy_project(heavy_project_uuid): + + task = _create.schedule_task(app, user_id, heavy_project_uuid) + + # TODO: implement a generic response for long running operation from a asyncio.Task + # https://google.aip.dev/151 diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_handlers_create.py b/services/web/server/tests/unit/with_dbs/02/test_projects_handlers_create.py new file mode 100644 index 00000000000..4330b8f7cd3 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_handlers_create.py @@ -0,0 +1,166 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +from typing import Any, Callable, Dict, List, Optional + +import pytest +from _helpers import ExpectedResponse, MockedStorageSubsystem, standard_role_response +from aiohttp import web +from aiohttp.test_utils import TestClient +from faker import Faker +from models_library.basic_types import UUIDStr +from pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.utils_login import UserInfoDict +from simcore_service_webserver._meta import api_version_prefix +from simcore_service_webserver.projects.projects_handlers_crud import ( + ProjectCreate, + ProjectGet, +) + +# HELPERS ----------------------------------------------------------------------------------------- + + +async def _request_create_project( + client: TestClient, + project_data: Optional[ProjectCreate] = None, + *, + # -> source + # from_project_uuid: Optional[UUIDStr] = None, + # -> destination + # to_template: bool = False + # to_project_uuid: UUIDStr ?? + as_template: Optional[UUIDStr] = None, # from_template_uuid + template_uuid: Optional[UUIDStr] = None, # to_template_uuid (client defines uuid!) 
+ # other options not in the body + copy_data: bool = True, + hidden: bool = False, # not in the body +) -> web.Response: + + url = client.app.router["create_projects"].url_for() + assert str(url) == f"/{api_version_prefix}/projects" + + params = {} + if template_uuid: + params["template_uuid"] = template_uuid + if as_template: + params["as_template"] = as_template + if copy_data: + params["copy_data"] = copy_data + if hidden: + params["hidden"] = copy_data + + if project_data: + project_data = project_data.dict(exclude_unset=True) + + resp = await client.post(url, params=params, json=project_data) + return resp + + +# TESTS ----------------------------------------------------------------------------------------- + + +@pytest.mark.skip(reason="UNDER DEV") +@pytest.mark.parametrize(*standard_role_response()) +async def test_new_project( + client: TestClient, + logged_user: UserInfoDict, + primary_group: Dict[str, str], + faker: Faker, + expected: ExpectedResponse, + storage_subsystem_mock, + project_db_cleaner, +): + # TODO: fixture with hypothesis on ProjectCreate?? + project_create = ProjectCreate( + name="Project from test_new_project", + thumbnail=faker.image_url(widht=60, height=60), + prj_owner=logged_user["email"], + access_rights=primary_group, + ) + + resp = await _request_create_project(client, project_create) + + data, _ = await assert_status(resp, expected.created) + if data: + project_get = ProjectGet.parse_obj(data) + + assert project_get.dict( + include=project_create.__fields_set__ + ) == project_create.dict(exclude_unset=True) + + +@pytest.mark.skip(reason="UNDER DEV") +@pytest.mark.parametrize(*standard_role_response()) +async def test_new_project_from_template( + client, + logged_user, + primary_group: Dict[str, str], + template_project, + expected, + storage_subsystem_mock, + project_db_cleaner, +): + raise NotImplementedError + + +@pytest.mark.skip(reason="UNDER DEV") +@pytest.mark.parametrize(*standard_role_response()) +async def test_new_project_from_template_with_body( + client, + logged_user, + primary_group: Dict[str, str], + standard_groups: List[Dict[str, str]], + template_project, + expected, + storage_subsystem_mock, + project_db_cleaner, +): + predefined = { + "uuid": "", + "name": "Sleepers8", + "description": "Some lines from user", + "thumbnail": "", + "prjOwner": "", + "creationDate": "2019-06-03T09:59:31.987Z", + "lastChangeDate": "2019-06-03T09:59:31.987Z", + "accessRights": { + str(standard_groups[0]["gid"]): { + "read": True, + "write": True, + "delete": False, + } + }, + "workbench": {}, + "tags": [], + "classifiers": [], + } + raise NotImplementedError + + +@pytest.mark.skip(reason="UNDER DEV") +@pytest.mark.parametrize(*standard_role_response()) +async def test_new_template_from_project( + client: TestClient, + logged_user: Dict[str, Any], + primary_group: Dict[str, str], + all_group: Dict[str, str], + user_project: Dict[str, Any], + expected: ExpectedResponse, + storage_subsystem_mock: MockedStorageSubsystem, + catalog_subsystem_mock: Callable, + project_db_cleaner: None, +): + # POST /v0/projects?as_template={project_uuid} + url = ( + client.app.router["create_projects"] + .url_for() + .with_query(as_template=user_project["uuid"]) + ) + + resp = await client.post(f"{url}") + data, error = await assert_status(resp, expected.created) + + raise NotImplementedError From 5f1fbcee8e08c967aa0d17bf465d2e8d272682f8 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 20 Apr 2022 16:12:00 +0200 Subject: [PATCH 
09/19] renames test files to fit module names --- .../web/server/tests/unit/with_dbs/02/test_projects__create.py | 2 +- .../{test_projects_crud.py => test_projects_handlers_crud.py} | 0 ...handlers_create.py => test_projects_handlers_crud_create.py} | 0 ...projects_delete.py => test_projects_handlers_crud_delete.py} | 0 4 files changed, 1 insertion(+), 1 deletion(-) rename services/web/server/tests/unit/with_dbs/02/{test_projects_crud.py => test_projects_handlers_crud.py} (100%) rename services/web/server/tests/unit/with_dbs/02/{test_projects_handlers_create.py => test_projects_handlers_crud_create.py} (100%) rename services/web/server/tests/unit/with_dbs/02/{test_projects_delete.py => test_projects_handlers_crud_delete.py} (100%) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects__create.py b/services/web/server/tests/unit/with_dbs/02/test_projects__create.py index eb644e8d6b3..7899887490e 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects__create.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects__create.py @@ -50,7 +50,7 @@ def test_clone_project(project_uuid: UUID): @pytest.mark.skip(reason="UNDER DEV") -def test_long_running_copy_project(heavy_project_uuid): +def test_long_running_copy_project(app, user_id, heavy_project_uuid): task = _create.schedule_task(app, user_id, heavy_project_uuid) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud.py b/services/web/server/tests/unit/with_dbs/02/test_projects_handlers_crud.py similarity index 100% rename from services/web/server/tests/unit/with_dbs/02/test_projects_crud.py rename to services/web/server/tests/unit/with_dbs/02/test_projects_handlers_crud.py diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_handlers_create.py b/services/web/server/tests/unit/with_dbs/02/test_projects_handlers_crud_create.py similarity index 100% rename from services/web/server/tests/unit/with_dbs/02/test_projects_handlers_create.py rename to services/web/server/tests/unit/with_dbs/02/test_projects_handlers_crud_create.py diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_delete.py b/services/web/server/tests/unit/with_dbs/02/test_projects_handlers_crud_delete.py similarity index 100% rename from services/web/server/tests/unit/with_dbs/02/test_projects_delete.py rename to services/web/server/tests/unit/with_dbs/02/test_projects_handlers_crud_delete.py From cae10a2a0b241388fcd5a523f73c280fb6deb261 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 20 Apr 2022 18:44:17 +0200 Subject: [PATCH 10/19] WIP [skip CI] --- .../projects/_access_rights.py | 206 ++++++++++++++++++ .../projects/_create.py | 84 +++---- .../02/test_projects__access_rights.py | 27 +++ .../unit/with_dbs/02/test_projects__create.py | 60 ++++- 4 files changed, 339 insertions(+), 38 deletions(-) create mode 100644 services/web/server/src/simcore_service_webserver/projects/_access_rights.py create mode 100644 services/web/server/tests/unit/with_dbs/02/test_projects__access_rights.py diff --git a/services/web/server/src/simcore_service_webserver/projects/_access_rights.py b/services/web/server/src/simcore_service_webserver/projects/_access_rights.py new file mode 100644 index 00000000000..c28f7cf4dba --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_access_rights.py @@ -0,0 +1,206 @@ +""" Helper functions to determine access rights on projects + +# DRAFT Rationale: + + osparc-simcore defines TWO authorization 
methods: i.e. a set of rules on what, + how and when any resource can be accessed or operated by a user + + ## ROLE-BASED METHOD: + In this method, a user is assigned a role (user/tester/admin) upon registration. Each role is + system-wide and defines a set of operations that the user *can* perform + - Every operation is named as a resource and an action (e.g. ) + - Resource is named hierarchically + - Roles can inherit permitted operations from other role + This method is static because is system-wide and it is defined directly in the + code at services/web/server/src/simcore_service_webserver/security_roles.py + It is defined on top of every API entrypoint and applied just after authentication of the user. + + ## GROUP-BASED METHOD: + The second method is designed to dynamically share a resource among groups of users. A group + defines a set of rules that apply to a resource and users can be added to the group dynamically. + So far, there are two resources that define access rights (AR): + - one applies to projects (read/write/delete) and + - the other to services (execute/write) + The project access rights are set in the column "access_rights" of the "projects" table . + The service access rights has its own table: service_access_rights + + Access rights apply hierarchically, meaning that the access granted to a project applies + to all nodes inside and stored data in nodes. + + How do these two AR coexist?: Access to read, write or delete a project are defined in the project AR but execution + will depend on the service AR attached to nodes inside. + + What about stored data? + - data generated in nodes inherits the AR from the associated project + - data generated in API uses full AR provided by ownership (i.e. user_id in files_meta_data table) + +""" + + +import logging +from dataclasses import dataclass +from typing import Dict, List, Optional + +import sqlalchemy as sa +from aiopg.sa.connection import SAConnection +from aiopg.sa.result import ResultProxy, RowProxy +from models_library.projects import ProjectID +from simcore_postgres_database.storage_models import user_to_groups +from sqlalchemy.sql import text + +logger = logging.getLogger(__name__) + + +@dataclass +class AccessRights: + read: bool + write: bool + delete: bool + + @classmethod + def all(cls) -> "AccessRights": + return cls(True, True, True) + + @classmethod + def none(cls) -> "AccessRights": + return cls(False, False, False) + + +class AccessLayerError(Exception): + """Base class for access-layer related errors""" + + +class InvalidFileIdentifier(AccessLayerError): + """Identifier does not follow the criteria to + be a file identifier (see naming criteria below) + """ + + def __init__(self, identifier, reason=None, details=None): + self.identifier = identifier + self.reason = reason or "Invalid file identifier" + self.details = details + + super().__init__(self.reason, self.details) + + def __str__(self): + return "Error in {}: {} [{}]".format(self.identifier, self.reason, self.details) + + +async def _get_user_groups_ids(conn: SAConnection, user_id: int) -> List[int]: + stmt = sa.select([user_to_groups.c.gid]).where(user_to_groups.c.uid == user_id) + rows = await (await conn.execute(stmt)).fetchall() + user_group_ids = [g.gid for g in rows] + return user_group_ids + + +def _aggregate_access_rights( + access_rights: Dict[str, Dict], group_ids: List[int] +) -> AccessRights: + try: + prj_access = {"read": False, "write": False, "delete": False} + for gid, grp_access in access_rights.items(): + if int(gid) in 
group_ids: + for operation in grp_access: + prj_access[operation] |= grp_access[operation] + + return AccessRights(**prj_access) + except KeyError: + # NOTE: database does NOT include schema for json access_rights column! + logger.warning( + "Invalid entry in projects.access_rights. Revoking all rights [%s]", + access_rights, + ) + return AccessRights.none() + + +async def list_projects_access_rights( + conn: SAConnection, user_id: int +) -> Dict[ProjectID, AccessRights]: + """ + Returns access-rights of user (user_id) over all OWNED or SHARED projects + """ + + user_group_ids: List[int] = await _get_user_groups_ids(conn, user_id) + + smt = text( + f"""\ + SELECT uuid, access_rights + FROM projects + WHERE ( + prj_owner = {user_id} + OR jsonb_exists_any( access_rights, ( + SELECT ARRAY( SELECT gid::TEXT FROM user_to_groups WHERE uid = {user_id} ) + ) + ) + ) + """ + ) + projects_access_rights = {} + + async for row in conn.execute(smt): + assert isinstance(row.access_rights, dict) + assert isinstance(row.uuid, ProjectID) + + if row.access_rights: + # TODO: access_rights should be direclty filtered from result in stm instead calling again user_group_ids + projects_access_rights[row.uuid] = _aggregate_access_rights( + row.access_rights, user_group_ids + ) + + else: + # backwards compatibility + # - no access_rights defined BUT project is owned + projects_access_rights[row.uuid] = AccessRights.all() + + return projects_access_rights + + +async def get_project_access_rights( + conn: SAConnection, user_id: int, project_id: ProjectID +) -> AccessRights: + """ + Returns access-rights of user (user_id) over a project resource (project_id) + """ + user_group_ids: List[int] = await _get_user_groups_ids(conn, user_id) + + stmt = text( + f"""\ + SELECT prj_owner, access_rights + FROM projects + WHERE ( + ( uuid = '{project_id}' ) AND ( + prj_owner = {user_id} + OR jsonb_exists_any( access_rights, ( + SELECT ARRAY( SELECT gid::TEXT FROM user_to_groups WHERE uid = {user_id} ) + ) + ) + ) + ) + """ + ) + + result: ResultProxy = await conn.execute(stmt) + row: Optional[RowProxy] = await result.first() + + if not row: + # Either project does not exists OR user_id has NO access + return AccessRights.none() + + assert row.prj_owner is None or isinstance(row.prj_owner, int) + assert isinstance(row.access_rights, dict) + + if row.prj_owner == user_id: + return AccessRights.all() + + # determine user's access rights by aggregating AR of all groups + prj_access = _aggregate_access_rights(row.access_rights, user_group_ids) + return prj_access + + +# HELPERS ----------------------------------------------- + + +async def get_readable_project_ids(conn: SAConnection, user_id: int) -> List[ProjectID]: + """Returns a list of projects where user has granted read-access""" + projects_access_rights = await list_projects_access_rights(conn, int(user_id)) + return [pid for pid, access in projects_access_rights.items() if access.read] diff --git a/services/web/server/src/simcore_service_webserver/projects/_create.py b/services/web/server/src/simcore_service_webserver/projects/_create.py index 087e783a3f9..6e3090db555 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_create.py +++ b/services/web/server/src/simcore_service_webserver/projects/_create.py @@ -5,10 +5,11 @@ import logging from copy import deepcopy from typing import Any, Dict, Optional, Tuple -from uuid import UUID, uuid1, uuid5 +from uuid import UUID, uuid4, uuid5 from aiohttp import web from models_library.basic_types import UUIDStr +from 
models_library.projects import ProjectID from models_library.users import UserID from ..storage_api import copy_data_folders_from_project @@ -19,6 +20,8 @@ NodesMap = Dict[UUIDStr, UUIDStr] +AUTO_CREATE_UUID = None + def _replace_uuids(entity: Any, project_map, nodes_map) -> Any: @@ -47,9 +50,9 @@ def _replace_uuids(entity: Any, project_map, nodes_map) -> Any: def _clone_project_from( - project: ProjectDict, + source_project: ProjectDict, *, - new_project_id: Optional[UUID], + new_project_id: ProjectID, clean_output_data: bool = False, ) -> Tuple[ProjectDict, NodesMap]: """ @@ -59,62 +62,60 @@ def _clone_project_from( - w/ or w/o outputs """ # - # TODO: not robust to changes in project schema + # TODO: Not robust to changes in project schema + # e.g. how to guarantee these are the outputs? + # should we mark these fields? # - project_copy = deepcopy(project) - - # Update project id - # NOTE: this can be re-assigned by dbapi if not unique - if new_project_id: - assert isinstance(new_project_id, UUID) # nosec - project_copy_uuid = new_project_id - else: - project_copy_uuid = uuid1() # random project id - - project_copy["uuid"] = str(project_copy_uuid) + cloned_project = deepcopy(source_project) + cloned_project["uuid"] = f"{new_project_id}" # Workbench nodes shall be unique within the project context - def _create_new_node_uuid(old_uuid): - return str(uuid5(project_copy_uuid, str(old_uuid))) + def _create_new_node_uuid(previous_uuid): + return f"{uuid5(new_project_id, f'{previous_uuid}')}" - nodes_map = {} - for node_uuid in project.get("workbench", {}).keys(): + nodes_map: NodesMap = {} + for node_uuid in source_project.get("workbench", {}).keys(): nodes_map[node_uuid] = _create_new_node_uuid(node_uuid) - project_map = {project["uuid"]: project_copy["uuid"]} + project_map = {source_project["uuid"]: cloned_project["uuid"]} - project_copy["workbench"] = _replace_uuids( - project_copy.get("workbench", {}), project_map, nodes_map + cloned_project["workbench"] = _replace_uuids( + cloned_project.get("workbench", {}), project_map, nodes_map ) - if "ui" in project_copy: - project_copy["ui"]["workbench"] = _replace_uuids( - project_copy["ui"].get("workbench", {}), project_map, nodes_map + + if "ui" in cloned_project: + cloned_project["ui"]["workbench"] = _replace_uuids( + cloned_project["ui"].get("workbench", {}), project_map, nodes_map ) - project_copy["ui"]["slideshow"] = _replace_uuids( - project_copy["ui"].get("slideshow", {}), project_map, nodes_map + cloned_project["ui"]["slideshow"] = _replace_uuids( + cloned_project["ui"].get("slideshow", {}), project_map, nodes_map ) - if "mode" in project_copy["ui"]: - project_copy["ui"]["mode"] = project_copy["ui"]["mode"] + if "mode" in cloned_project["ui"]: + cloned_project["ui"]["mode"] = cloned_project["ui"]["mode"] if clean_output_data: FIELDS_TO_DELETE = ("outputs", "progress", "runHash") - for node_data in project_copy.get("workbench", {}).values(): + for node_data in cloned_project.get("workbench", {}).values(): for field in FIELDS_TO_DELETE: node_data.pop(field, None) - return project_copy, nodes_map + return cloned_project, nodes_map -async def copy_project( +async def clone_project( app: web.Application, - project: ProjectDict, user_id: UserID, - new_project_id: Optional[UUID] = None, + project: ProjectDict, + *, + new_project_id: Optional[UUID] = AUTO_CREATE_UUID, ) -> ProjectDict: - """Clones both document and data folders of a project + """ + Let's define clone as a copy with potentially some field + updates (e.g. new identifiers, etc). 
- - document + Clones both project in db and its associated data-folders + - project - get new identifiers for project and nodes - data folders - folder name composes as project_uuid/node_uuid @@ -124,8 +125,15 @@ async def copy_project( # TODO: set as invisible and set visible when copied so it can be used? # TODO: atomic operation? + # TODO: can perform action? check whether user_id can clone project + # TODO: perform action? + db: ProjectDBAPI = app[APP_PROJECT_DBAPI] + # Update project id + new_project_id = new_project_id or uuid4() + assert isinstance(new_project_id, UUID) # nosec + # creates clone project_copy, nodes_map = _clone_project_from( project, @@ -146,4 +154,6 @@ async def copy_project( return updated_project -# def create_project( source_project: ProjectID, ) +# TODO: schedule task +# TODO: long running https://google.aip.dev/151 +# TODO: create_project(...) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects__access_rights.py b/services/web/server/tests/unit/with_dbs/02/test_projects__access_rights.py new file mode 100644 index 00000000000..e013ea9df8f --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/02/test_projects__access_rights.py @@ -0,0 +1,27 @@ +from typing import Optional + +from aiohttp.test_utils import TestClient +from models_library.projects import ProjectID +from models_library.users import UserID +from simcore_service_webserver._constants import APP_PROJECT_DBAPI +from simcore_service_webserver.projects._access_rights import ( + AccessRights, + get_project_access_rights, +) +from simcore_service_webserver.projects.projects_db import ProjectDBAPI + + +async def test_access_rights( + client: TestClient, user_id: UserID, project_id: ProjectID +): + assert client.app + + db: ProjectDBAPI = client.app[APP_PROJECT_DBAPI] + + async with db.engine.acquire() as conn, conn.begin(): + # access layer + can: Optional[AccessRights] = await get_project_access_rights( + conn, int(user_id), project_id + ) + assert not can.read + assert not can.delete diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects__create.py b/services/web/server/tests/unit/with_dbs/02/test_projects__create.py index 7899887490e..30de380de53 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects__create.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects__create.py @@ -4,12 +4,19 @@ # pylint: disable=unused-variable +import json +from copy import deepcopy +from pathlib import Path +from typing import Any, Dict from uuid import UUID +import jsonschema import pytest +from jsonschema import ValidationError from models_library.projects import Workbench from pydantic import BaseModel from simcore_service_webserver.projects import _create +from simcore_service_webserver.projects.project_models import ProjectDict # HELPERS ----------------------------------------------------------------------------------------- @@ -26,8 +33,59 @@ def get(self, uuid): # TESTS ----------------------------------------------------------------------------------------- +@pytest.fixture +def project_schema(project_schema_file: Path) -> Dict[str, Any]: + with open(project_schema_file) as fh: + schema = json.load(fh) + return schema + + +@pytest.mark.parametrize( + "test_data_file_name", + [ + "fake-project.json", + "fake-template-projects.hack08.notebooks.json", + "fake-template-projects.isan.2dplot.json", + "fake-template-projects.isan.matward.json", + "fake-template-projects.isan.paraview.json", + "fake-template-projects.isan.ucdavis.json", + 
"fake-template-projects.sleepers.json", + ], +) +def test_clone_project_row( + test_data_file_name: str, + project_schema: Dict[str, Any], + tests_data_dir: Path, + app, + user_id, +): + original_project: ProjectDict = json.loads( + (tests_data_dir / test_data_file_name).read_text() + ) + + source_project: ProjectDict = deepcopy(original_project) + clone, _ = _create.clone_project( + app, user_id, source_project, new_project_id=_create.AUTO_CREATE_UUID + ) + + # was not modified by clone_project_document + assert source_project == original_project + + # valid clone + assert clone["uuid"] != original_project["uuid"] + + node_ids = original_project["workbench"].keys() + for clone_node_id in clone["workbench"]: + assert clone_node_id not in node_ids + + try: + jsonschema.validate(instance=clone, schema=project_schema) + except ValidationError as err: + pytest.fail(f"Invalid clone of '{test_data_file_name}': {err.message}") + + @pytest.mark.skip(reason="UNDER DEV") -def test_clone_project(project_uuid: UUID): +def test_clone_project_dev(project_uuid: UUID): # a project in db repo = ProjectRepo() From 32b58e7e317d6d47107d0d732e7c91035edc6ec6 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Thu, 12 May 2022 10:06:04 +0200 Subject: [PATCH 11/19] minor --- .../projects/_access_rights.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/_access_rights.py b/services/web/server/src/simcore_service_webserver/projects/_access_rights.py index c28f7cf4dba..e5c5c2a6d90 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_access_rights.py +++ b/services/web/server/src/simcore_service_webserver/projects/_access_rights.py @@ -51,6 +51,9 @@ logger = logging.getLogger(__name__) +# MODELS ----------------------------------------------- + + @dataclass class AccessRights: read: bool @@ -66,6 +69,7 @@ def none(cls) -> "AccessRights": return cls(False, False, False) +# ERRORS -------------------------------------------------- class AccessLayerError(Exception): """Base class for access-layer related errors""" @@ -86,6 +90,9 @@ def __str__(self): return "Error in {}: {} [{}]".format(self.identifier, self.reason, self.details) +# HELPERS --------------------------------------------- + + async def _get_user_groups_ids(conn: SAConnection, user_id: int) -> List[int]: stmt = sa.select([user_to_groups.c.gid]).where(user_to_groups.c.uid == user_id) rows = await (await conn.execute(stmt)).fetchall() @@ -113,6 +120,7 @@ def _aggregate_access_rights( return AccessRights.none() +# API ------------------------------------------------------------ async def list_projects_access_rights( conn: SAConnection, user_id: int ) -> Dict[ProjectID, AccessRights]: @@ -197,9 +205,6 @@ async def get_project_access_rights( return prj_access -# HELPERS ----------------------------------------------- - - async def get_readable_project_ids(conn: SAConnection, user_id: int) -> List[ProjectID]: """Returns a list of projects where user has granted read-access""" projects_access_rights = await list_projects_access_rights(conn, int(user_id)) From fba2e1b0d8f1a17baea69d05a69ee25990910567 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 13 May 2022 10:13:31 +0200 Subject: [PATCH 12/19] rename access --- .../projects/{_access_rights.py => _access.py} | 0 ...test_projects__access_rights.py => test_projects__access.py} | 2 +- 2 files changed, 1 
insertion(+), 1 deletion(-) rename services/web/server/src/simcore_service_webserver/projects/{_access_rights.py => _access.py} (100%) rename services/web/server/tests/unit/with_dbs/02/{test_projects__access_rights.py => test_projects__access.py} (92%) diff --git a/services/web/server/src/simcore_service_webserver/projects/_access_rights.py b/services/web/server/src/simcore_service_webserver/projects/_access.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/projects/_access_rights.py rename to services/web/server/src/simcore_service_webserver/projects/_access.py diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects__access_rights.py b/services/web/server/tests/unit/with_dbs/02/test_projects__access.py similarity index 92% rename from services/web/server/tests/unit/with_dbs/02/test_projects__access_rights.py rename to services/web/server/tests/unit/with_dbs/02/test_projects__access.py index e013ea9df8f..981d22e0cb8 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects__access_rights.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects__access.py @@ -4,7 +4,7 @@ from models_library.projects import ProjectID from models_library.users import UserID from simcore_service_webserver._constants import APP_PROJECT_DBAPI -from simcore_service_webserver.projects._access_rights import ( +from simcore_service_webserver.projects._access import ( AccessRights, get_project_access_rights, ) From 5d9fdc585c7d3880178aed5fa025b536f19ea8cd Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 13 May 2022 10:14:49 +0200 Subject: [PATCH 13/19] moves validation helper to models --- .../projects/project_models.py | 10 ++++++++++ .../simcore_service_webserver/projects/projects_api.py | 10 +--------- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/project_models.py b/services/web/server/src/simcore_service_webserver/projects/project_models.py index ed1703ab80a..2a0f6758a9c 100644 --- a/services/web/server/src/simcore_service_webserver/projects/project_models.py +++ b/services/web/server/src/simcore_service_webserver/projects/project_models.py @@ -1,9 +1,11 @@ +import asyncio import json from enum import Enum from typing import Any, Dict, Optional, Tuple from aiohttp import web from aiopg.sa.result import RowProxy +from servicelib.aiohttp.jsonschema_validation import validate_instance from simcore_postgres_database.models.projects import ProjectType from .._constants import APP_JSONSCHEMA_SPECS_KEY @@ -43,8 +45,16 @@ def setup_projects_model_schema(app: web.Application): return app[APP_JSONSCHEMA_SPECS_KEY]["projects"] +async def validate_project(app: web.Application, project: ProjectDict): + project_schema = app[APP_JSONSCHEMA_SPECS_KEY]["projects"] + await asyncio.get_event_loop().run_in_executor( + None, validate_instance, project, project_schema + ) + + __all__: Tuple[str, ...] 
= ( "ProjectDict", "ProjectProxy", "setup_projects_model_schema", + "validate_project", ) diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index 3aadf7e2e74..ba847fb9178 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -31,8 +31,6 @@ ) from models_library.users import UserID from pydantic.types import PositiveInt -from servicelib.aiohttp.application_keys import APP_JSONSCHEMA_SPECS_KEY -from servicelib.aiohttp.jsonschema_validation import validate_instance from servicelib.json_serialization import json_dumps from servicelib.observer import observe from servicelib.utils import fire_and_forget_task, logged_gather @@ -54,6 +52,7 @@ from ..users_exceptions import UserNotFoundError from . import _delete from .project_lock import UserNameDict, get_project_locked_state, lock_project +from .project_models import validate_project from .projects_db import APP_PROJECT_DBAPI, ProjectDBAPI from .projects_exceptions import NodeNotFoundError, ProjectLockError from .projects_utils import extract_dns_without_default_port @@ -67,13 +66,6 @@ def _is_node_dynamic(node_key: str) -> bool: return "/dynamic/" in node_key -async def validate_project(app: web.Application, project: Dict): - project_schema = app[APP_JSONSCHEMA_SPECS_KEY]["projects"] - await asyncio.get_event_loop().run_in_executor( - None, validate_instance, project, project_schema - ) - - async def get_project_for_user( app: web.Application, project_uuid: str, From 0ceb5bb1aeff9dc7b29ab96ddca732fbc7405f60 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 13 May 2022 10:17:06 +0200 Subject: [PATCH 14/19] drafts _create --- .../projects/_create.py | 56 +++++++++++++------ 1 file changed, 39 insertions(+), 17 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/_create.py b/services/web/server/src/simcore_service_webserver/projects/_create.py index 6e3090db555..490740ba8fa 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_create.py +++ b/services/web/server/src/simcore_service_webserver/projects/_create.py @@ -1,4 +1,5 @@ -""" Implementations of project creations +""" Implementations of project creation, either cloning other or +from scratch """ @@ -13,8 +14,9 @@ from models_library.users import UserID from ..storage_api import copy_data_folders_from_project -from .project_models import ProjectDict +from .project_models import ProjectDict, validate_project from .projects_db import APP_PROJECT_DBAPI, ProjectDBAPI +from .projects_exceptions import ProjectNotFoundError log = logging.getLogger(__name__) @@ -49,7 +51,7 @@ def _replace_uuids(entity: Any, project_map, nodes_map) -> Any: return entity -def _clone_project_from( +def _clone_project_model( source_project: ProjectDict, *, new_project_id: ProjectID, @@ -106,8 +108,10 @@ def _create_new_node_uuid(previous_uuid): async def clone_project( app: web.Application, user_id: UserID, - project: ProjectDict, + source_project_id: ProjectID, *, + copy_data: bool = True, + as_template: bool = False, new_project_id: Optional[UUID] = AUTO_CREATE_UUID, ) -> ProjectDict: """ @@ -130,30 +134,48 @@ async def clone_project( db: ProjectDBAPI = app[APP_PROJECT_DBAPI] + # 1 ------------- + # Get source project + # TODO: use API instead (will verify e.g. 
access) + source_project = await db.get_user_project(user_id, f"{source_project_id}") + if not source_project: + raise ProjectNotFoundError(source_project_id) + + # 2 ------------- # Update project id new_project_id = new_project_id or uuid4() assert isinstance(new_project_id, UUID) # nosec # creates clone - project_copy, nodes_map = _clone_project_from( - project, + project_copy, nodes_map = _clone_project_model( + source_project, new_project_id=new_project_id, clean_output_data=False, ) - # inject in db - await db.add_project(project_copy, user_id, force_project_uuid=True) - - # access writes apply again in - updated_project = await copy_data_folders_from_project( - app, project, project_copy, nodes_map, user_id + # inject in db # TODO: repository pattern + await validate_project(app, project_copy) + await db.add_project( + project_copy, user_id, force_project_uuid=True, force_as_template=True ) - assert updated_project == project_copy # nosec + # 3 ------------- + + if copy_data: + # TODO: schedule task? + # TODO: long running https://google.aip.dev/151 ? + + # access writes apply again in + updated_project = await copy_data_folders_from_project( + app, source_project, project_copy, nodes_map, user_id + ) - return updated_project + assert updated_project == project_copy # nosec + # 4 ------------- + # update access rights + if source_project["type"] == "TEMPLATE": + # remove template access rights + project_copy["accessRights"] = {} -# TODO: schedule task -# TODO: long running https://google.aip.dev/151 -# TODO: create_project(...) + return project_copy From 73b3d14aa37da2a8ff57aea6e27817c7dca2d51b Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 13 May 2022 13:46:53 +0200 Subject: [PATCH 15/19] parse_reqeust_parameters_as implementation --- .../service-library/requirements/_aiohttp.in | 10 +- .../service-library/requirements/_base.in | 2 +- .../aiohttp/requests_parameters_model.py | 0 .../src/servicelib/aiohttp/requests_utils.py | 58 +++++++++++ .../tests/aiohttp/test_requests_utils.py | 95 +++++++++++++++++++ 5 files changed, 158 insertions(+), 7 deletions(-) create mode 100644 packages/service-library/src/servicelib/aiohttp/requests_parameters_model.py create mode 100644 packages/service-library/tests/aiohttp/test_requests_utils.py diff --git a/packages/service-library/requirements/_aiohttp.in b/packages/service-library/requirements/_aiohttp.in index a3ad443553f..ce68c6d2cfd 100644 --- a/packages/service-library/requirements/_aiohttp.in +++ b/packages/service-library/requirements/_aiohttp.in @@ -5,14 +5,12 @@ --constraint ./_base.in -openapi-core==0.12.0 # frozen until https://github.com/ITISFoundation/osparc-simcore/pull/1396 is CLOSED -lazy-object-proxy~=1.4.3 # cannot upgrade due to contraints in openapi-core -aiozipkin - aiohttp aiopg[sa] +aiozipkin +attrs jsonschema +lazy-object-proxy~=1.4.3 # cannot upgrade due to contraints in openapi-core +openapi-core==0.12.0 # frozen until https://github.com/ITISFoundation/osparc-simcore/pull/1396 is CLOSED prometheus_client -attrs - werkzeug diff --git a/packages/service-library/requirements/_base.in b/packages/service-library/requirements/_base.in index cb03cd52dba..7c25a1c93d0 100644 --- a/packages/service-library/requirements/_base.in +++ b/packages/service-library/requirements/_base.in @@ -5,8 +5,8 @@ --constraint ./constraints.txt aiodebug +aiofiles pydantic pyinstrument pyyaml tenacity -aiofiles \ No newline at end of file diff --git 
a/packages/service-library/src/servicelib/aiohttp/requests_parameters_model.py b/packages/service-library/src/servicelib/aiohttp/requests_parameters_model.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/packages/service-library/src/servicelib/aiohttp/requests_utils.py b/packages/service-library/src/servicelib/aiohttp/requests_utils.py index 8f47f0b3698..7e00fc1dc2a 100644 --- a/packages/service-library/src/servicelib/aiohttp/requests_utils.py +++ b/packages/service-library/src/servicelib/aiohttp/requests_utils.py @@ -1,4 +1,7 @@ +from typing import Optional, Type, TypeVar + from aiohttp import web +from pydantic import BaseModel, ValidationError def get_request(*args, **kwargs) -> web.BaseRequest: @@ -12,3 +15,58 @@ def get_request(*args, **kwargs) -> web.BaseRequest: ) raise RuntimeError(msg) return request + + +M = TypeVar("M", bound=BaseModel) + + +def parse_request_parameters_as( + parameters_model_cls: Type[M], + request: web.Request, + *, + app_storage_map: Optional[dict[str, str]] = None, +) -> M: + """Parses request parameters as defined in 'parameters_model_cls' and raises HTTPBadRequest if validation fails + + - 'app_storage_map' maps field name to app's storage key. + + Analogous to pydantic.tools.parse_obj_as() + + NOTE: for the request body, you can proceed as + + body_model = parse_obj_as(ModelGet, await request.json()) + + :raises HTTPBadRequest if validation of parameters or queries fail + :raises ValidationError if app key fails validation + """ + app_storage_map = app_storage_map or {} + data = { + **request.match_info, + **request.query, + **{ + field_name: request.app.get(app_key) + for field_name, app_key in app_storage_map.items() + }, + } + try: + model = parameters_model_cls.parse_obj(data) + return model + + except ValidationError as e: + bad_params_errors: list[str] = [] + request_parameters = set(data.keys()) - set(app_storage_map.keys()) + + for error in e.errors(): + name = error["loc"][-1] + if name in request_parameters: + bad_params_errors.append( + f"Invalid {name}='{data[name]}' since {error['msg']} " + ) + + if bad_params_errors: + raise web.HTTPBadRequest( + reason=f"Invalid request parameters: {'; '.join(bad_params_errors)}" + ) + + # otherwise is an app value, re-raises err + raise diff --git a/packages/service-library/tests/aiohttp/test_requests_utils.py b/packages/service-library/tests/aiohttp/test_requests_utils.py new file mode 100644 index 00000000000..bc29118bdb1 --- /dev/null +++ b/packages/service-library/tests/aiohttp/test_requests_utils.py @@ -0,0 +1,95 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +from uuid import UUID + +import pytest +from aiohttp import web +from aiohttp.test_utils import make_mocked_request +from faker import Faker +from pydantic import BaseModel, ValidationError +from servicelib.aiohttp.requests_utils import parse_request_parameters_as + + +class MyRequestParameters(BaseModel): + user_id: int # app[] + project_uuid: UUID + is_ok: bool = True + + +RQT_USERID_KEY = f"{__name__}.user_id" +APP_KEYS_MAP = {"user_id": RQT_USERID_KEY} + + +@pytest.fixture +def fake_params(faker: Faker): + return MyRequestParameters( + user_id=faker.pyint(), project_uuid=faker.uuid4(), is_ok=faker.pybool() + ) + + +@pytest.fixture +def app(fake_params: MyRequestParameters) -> web.Application: + app = web.Application() + app[RQT_USERID_KEY] = fake_params.user_id + return app + + +# TESTS + + +def test_parse_request_parameters_as( + fake_params: 
MyRequestParameters, app: web.Application +): + + request = make_mocked_request( + "GET", + f"/projects/{fake_params.project_uuid}?is_ok={fake_params.is_ok}", + match_info={"project_uuid": fake_params.project_uuid}, + app=app, + ) + + parsed_params = parse_request_parameters_as( + MyRequestParameters, request, app_storage_map=APP_KEYS_MAP + ) + + assert parsed_params == fake_params + + +def test_parse_request_parameters_raises_http_error( + fake_params: MyRequestParameters, app: web.Application +): + + request = make_mocked_request( + "GET", + f"/projects/1234?is_ok={fake_params.is_ok}", + match_info={"project_uuid": "1234"}, + app=app, + ) + + with pytest.raises(web.HTTPBadRequest) as exc_info: + parse_request_parameters_as( + MyRequestParameters, request, app_storage_map=APP_KEYS_MAP + ) + bad_request_exc = exc_info.value + assert "project_uuid" in bad_request_exc.reason + + +def test_parse_request_parameters_raises_validation_error( + fake_params: MyRequestParameters, +): + + request = make_mocked_request( + "GET", + f"/projects/{fake_params.project_uuid}?is_ok={fake_params.is_ok}", + match_info={"project_uuid": fake_params.project_uuid}, + app=web.Application(), # no user! + ) + + with pytest.raises(ValidationError) as exc_info: + parse_request_parameters_as( + MyRequestParameters, request, app_storage_map=APP_KEYS_MAP + ) + + assert "user_id" in exc_info.value.errors()[0]["loc"] From 72956a25a83a857e7e76e93ad66ea1aaa6481f40 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 13 May 2022 18:00:52 +0200 Subject: [PATCH 16/19] refactor requests-validation --- .../aiohttp/requests_parameters_model.py | 0 .../src/servicelib/aiohttp/requests_utils.py | 58 ------- .../servicelib/aiohttp/requests_validation.py | 85 ++++++++++ .../tests/aiohttp/test_requests_utils.py | 95 ------------ .../tests/aiohttp/test_requests_validation.py | 145 ++++++++++++++++++ 5 files changed, 230 insertions(+), 153 deletions(-) delete mode 100644 packages/service-library/src/servicelib/aiohttp/requests_parameters_model.py create mode 100644 packages/service-library/src/servicelib/aiohttp/requests_validation.py delete mode 100644 packages/service-library/tests/aiohttp/test_requests_utils.py create mode 100644 packages/service-library/tests/aiohttp/test_requests_validation.py diff --git a/packages/service-library/src/servicelib/aiohttp/requests_parameters_model.py b/packages/service-library/src/servicelib/aiohttp/requests_parameters_model.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/packages/service-library/src/servicelib/aiohttp/requests_utils.py b/packages/service-library/src/servicelib/aiohttp/requests_utils.py index 7e00fc1dc2a..8f47f0b3698 100644 --- a/packages/service-library/src/servicelib/aiohttp/requests_utils.py +++ b/packages/service-library/src/servicelib/aiohttp/requests_utils.py @@ -1,7 +1,4 @@ -from typing import Optional, Type, TypeVar - from aiohttp import web -from pydantic import BaseModel, ValidationError def get_request(*args, **kwargs) -> web.BaseRequest: @@ -15,58 +12,3 @@ def get_request(*args, **kwargs) -> web.BaseRequest: ) raise RuntimeError(msg) return request - - -M = TypeVar("M", bound=BaseModel) - - -def parse_request_parameters_as( - parameters_model_cls: Type[M], - request: web.Request, - *, - app_storage_map: Optional[dict[str, str]] = None, -) -> M: - """Parses request parameters as defined in 'parameters_model_cls' and raises HTTPBadRequest if validation fails - - - 'app_storage_map' maps field name to app's 
storage key. - - Analogous to pydantic.tools.parse_obj_as() - - NOTE: for the request body, you can proceed as - - body_model = parse_obj_as(ModelGet, await request.json()) - - :raises HTTPBadRequest if validation of parameters or queries fail - :raises ValidationError if app key fails validation - """ - app_storage_map = app_storage_map or {} - data = { - **request.match_info, - **request.query, - **{ - field_name: request.app.get(app_key) - for field_name, app_key in app_storage_map.items() - }, - } - try: - model = parameters_model_cls.parse_obj(data) - return model - - except ValidationError as e: - bad_params_errors: list[str] = [] - request_parameters = set(data.keys()) - set(app_storage_map.keys()) - - for error in e.errors(): - name = error["loc"][-1] - if name in request_parameters: - bad_params_errors.append( - f"Invalid {name}='{data[name]}' since {error['msg']} " - ) - - if bad_params_errors: - raise web.HTTPBadRequest( - reason=f"Invalid request parameters: {'; '.join(bad_params_errors)}" - ) - - # otherwise is an app value, re-raises err - raise diff --git a/packages/service-library/src/servicelib/aiohttp/requests_validation.py b/packages/service-library/src/servicelib/aiohttp/requests_validation.py new file mode 100644 index 00000000000..8a146f674d1 --- /dev/null +++ b/packages/service-library/src/servicelib/aiohttp/requests_validation.py @@ -0,0 +1,85 @@ +""" Parses and validation aiohttp requests against pydantic models + +An analogous to pydantic.tools.parse_obj_as(...) for aiohttp's requests +""" + +from typing import Type, TypeVar + +from aiohttp import web +from pydantic import BaseModel, ValidationError +from servicelib.json_serialization import json_dumps + +M = TypeVar("M", bound=BaseModel) + + +def _convert_to_http_error_kwargs( + error: ValidationError, + reason: str, +): + details = [ + {"loc": ".".join(map(str, e["loc"])), "msg": e["msg"]} for e in error.errors() + ] + + return dict( + reason=reason.format(failed=", ".join(d["loc"] for d in details)), + body=json_dumps({"error": details}), + content_type="application/json", + ) + + +def parse_request_parameters_as( + parameters_schema: Type[M], + request: web.Request, +) -> M: + """Parses path and query parameters from 'request' and validates against 'parameters_schema' + + :raises HTTPBadRequest if validation of parameters or queries fail + """ + try: + params = { + **request.match_info, + **request.query, + } + return parameters_schema.parse_obj(params) + + except ValidationError as err: + raise web.HTTPBadRequest( + **_convert_to_http_error_kwargs( + err, + reason="Invalid parameters {failed} in request", + ) + ) + + +async def parse_request_body_as(model_schema: Type[M], request: web.Request) -> M: + """Parses and validates request body against schema + + :raises HTTPBadRequest + """ + try: + body = await request.json() + return model_schema.parse_obj(body) + + except ValidationError as err: + raise web.HTTPBadRequest( + **_convert_to_http_error_kwargs( + err, + reason="Invalid {failed} in request body", + ) + ) + + +def parse_request_context_as( + context_model_cls: Type[M], + request: web.Request, +) -> M: + """Parses and validate request context + + :raises ValidationError + """ + app_ctx = dict(request.app) + req_ctx = dict(request) + + assert set(app_ctx.keys()).intersection(req_ctx.keys()) == set() # nosec + context = {**app_ctx, **req_ctx} + return context_model_cls.parse_obj(context) diff --git a/packages/service-library/tests/aiohttp/test_requests_utils.py 
b/packages/service-library/tests/aiohttp/test_requests_utils.py deleted file mode 100644 index bc29118bdb1..00000000000 --- a/packages/service-library/tests/aiohttp/test_requests_utils.py +++ /dev/null @@ -1,95 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable - -from uuid import UUID - -import pytest -from aiohttp import web -from aiohttp.test_utils import make_mocked_request -from faker import Faker -from pydantic import BaseModel, ValidationError -from servicelib.aiohttp.requests_utils import parse_request_parameters_as - - -class MyRequestParameters(BaseModel): - user_id: int # app[] - project_uuid: UUID - is_ok: bool = True - - -RQT_USERID_KEY = f"{__name__}.user_id" -APP_KEYS_MAP = {"user_id": RQT_USERID_KEY} - - -@pytest.fixture -def fake_params(faker: Faker): - return MyRequestParameters( - user_id=faker.pyint(), project_uuid=faker.uuid4(), is_ok=faker.pybool() - ) - - -@pytest.fixture -def app(fake_params: MyRequestParameters) -> web.Application: - app = web.Application() - app[RQT_USERID_KEY] = fake_params.user_id - return app - - -# TESTS - - -def test_parse_request_parameters_as( - fake_params: MyRequestParameters, app: web.Application -): - - request = make_mocked_request( - "GET", - f"/projects/{fake_params.project_uuid}?is_ok={fake_params.is_ok}", - match_info={"project_uuid": fake_params.project_uuid}, - app=app, - ) - - parsed_params = parse_request_parameters_as( - MyRequestParameters, request, app_storage_map=APP_KEYS_MAP - ) - - assert parsed_params == fake_params - - -def test_parse_request_parameters_raises_http_error( - fake_params: MyRequestParameters, app: web.Application -): - - request = make_mocked_request( - "GET", - f"/projects/1234?is_ok={fake_params.is_ok}", - match_info={"project_uuid": "1234"}, - app=app, - ) - - with pytest.raises(web.HTTPBadRequest) as exc_info: - parse_request_parameters_as( - MyRequestParameters, request, app_storage_map=APP_KEYS_MAP - ) - bad_request_exc = exc_info.value - assert "project_uuid" in bad_request_exc.reason - - -def test_parse_request_parameters_raises_validation_error( - fake_params: MyRequestParameters, -): - - request = make_mocked_request( - "GET", - f"/projects/{fake_params.project_uuid}?is_ok={fake_params.is_ok}", - match_info={"project_uuid": fake_params.project_uuid}, - app=web.Application(), # no user! 
- ) - - with pytest.raises(ValidationError) as exc_info: - parse_request_parameters_as( - MyRequestParameters, request, app_storage_map=APP_KEYS_MAP - ) - - assert "user_id" in exc_info.value.errors()[0]["loc"] diff --git a/packages/service-library/tests/aiohttp/test_requests_validation.py b/packages/service-library/tests/aiohttp/test_requests_validation.py new file mode 100644 index 00000000000..e638ed9f96d --- /dev/null +++ b/packages/service-library/tests/aiohttp/test_requests_validation.py @@ -0,0 +1,145 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +from typing import Any +from uuid import UUID + +import pytest +from aiohttp import web +from aiohttp.test_utils import make_mocked_request +from faker import Faker +from jsonschema import ValidationError +from pydantic import BaseModel, Field +from servicelib.aiohttp.requests_validation import ( + parse_request_body_as, + parse_request_context_as, + parse_request_parameters_as, +) +from yarl import URL + +# HELPERS +RQT_USERID_KEY = f"{__name__}.user_id" +APP_SECRET_KEY = f"{__name__}.secret" + + +class MyRequestContext(BaseModel): + user_id: int = Field(alias=RQT_USERID_KEY) + secret: str = Field(alias=APP_SECRET_KEY) + + @classmethod + def create_fake(cls, faker: Faker): + return cls(user_id=faker.pyint(), secret=faker.password()) + + +class MyRequestParameters(BaseModel): + project_uuid: UUID + is_ok: bool = True + + @classmethod + def create_fake(cls, faker: Faker): + return cls(project_uuid=faker.uuid4(), is_ok=faker.pybool()) + + +class Sub(BaseModel): + a: float = 33 + + @classmethod + def create_fake(cls, faker: Faker): + return cls(a=faker.pyfloat()) + + +class MyBody(BaseModel): + x: int + y: bool = False + z: Sub + + @classmethod + def create_fake(cls, faker: Faker): + return cls(x=faker.pyint(), y=faker.pybool(), z=Sub.create_fake(faker)) + + +def create_fake_request( + app: web.Application, params: dict[str, str], queries: dict[str, Any], body: Any +): + url = URL.build(path="/projects/{project_uuid}/".format(**params), query=queries) + + request = make_mocked_request( + "GET", + f"{url}", + match_info=params, + app=app, + payload=body, + ) + return request + + +# TESTS + + +async def test_parse_request_as( + app: web.Application, + faker: Faker, +): + + context = MyRequestContext.create_fake(faker) + params = MyRequestParameters.create_fake(faker) + body = MyBody.create_fake(faker) + + app = web.Application() + app[APP_SECRET_KEY] = context.secret + app["SKIP_THIS"] = 0 + + request = create_fake_request( + app, + params=params.dict(include={"project_uuid"}), + queries=params.dict(exclude={"project_uuid"}), + body=body.json(), + ) + request[RQT_USERID_KEY] = context.user_id + request["SKIP_ALSO_THIS"] = 0 + + # params + valid_params = parse_request_parameters_as(MyRequestParameters, request) + assert valid_params == params + + # body + valid_body = await parse_request_body_as(MyBody, request) + assert valid_body == body + + # context + valid_context = parse_request_context_as(MyRequestContext, request) + assert valid_context == context + + +async def test_parse_request_as_raises_http_error( + app: web.Application, + faker: Faker, +): + + body = MyBody.create_fake(faker) + + request = create_fake_request( + app, + params={"project_uuid": "invalid-uuid"}, + queries={}, + body={"wrong": 33}, + ) + + # params + with pytest.raises(web.HTTPBadRequest) as exc_info: + parse_request_parameters_as(MyRequestParameters, request) + + bad_request_exc = exc_info.value + 
assert "project_uuid" in bad_request_exc.reason + + # body + with pytest.raises(web.HTTPBadRequest) as exc_info: + await parse_request_body_as(MyBody, request) + + bad_request_exc = exc_info.value + assert "wrong" in bad_request_exc.reason + + # context + with pytest.raises(ValidationError): + parse_request_context_as(MyRequestContext, request) From 681befaef5dc122d4ba10b3cf1f228571e53baa4 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Mon, 16 May 2022 14:58:36 +0200 Subject: [PATCH 17/19] requests validation refatored --- .../servicelib/aiohttp/requests_validation.py | 109 ++++---- .../tests/aiohttp/test_requests_validation.py | 251 +++++++++++++----- 2 files changed, 234 insertions(+), 126 deletions(-) diff --git a/packages/service-library/src/servicelib/aiohttp/requests_validation.py b/packages/service-library/src/servicelib/aiohttp/requests_validation.py index 8a146f674d1..76f1e20e53d 100644 --- a/packages/service-library/src/servicelib/aiohttp/requests_validation.py +++ b/packages/service-library/src/servicelib/aiohttp/requests_validation.py @@ -1,8 +1,9 @@ """ Parses and validation aiohttp requests against pydantic models -An analogous to pydantic.tools.parse_obj_as(...) for aiohttp's requests +These functions are analogous to `pydantic.tools.parse_obj_as(model_class, obj)` for aiohttp's requests """ +from contextlib import contextmanager from typing import Type, TypeVar from aiohttp import web @@ -12,43 +13,65 @@ M = TypeVar("M", bound=BaseModel) -def _convert_to_http_error_kwargs( - error: ValidationError, - reason: str, -): - details = [ - {"loc": ".".join(map(str, e["loc"])), "msg": e["msg"]} for e in error.errors() - ] +@contextmanager +def handle_validation_as_http_error(*, error_msg_template: str) -> None: + """ + Transforms ValidationError into HTTP error + """ + try: + + yield + + except ValidationError as err: + details = [ + {"loc": ".".join(map(str, e["loc"])), "msg": e["msg"]} for e in err.errors() + ] + msg = error_msg_template.format(failed=", ".join(d["loc"] for d in details)) + + raise web.HTTPBadRequest( + reason=msg, + body=json_dumps({"error": {"msg": msg, "details": details}}), + content_type="application/json", + ) + - return dict( - reason=reason.format(failed=", ".join(d["loc"] for d in details)), - body=json_dumps({"error": details}), - content_type="application/json", - ) +# NOTE: +# +# - parameters in the path are part of the resource name and therefore are required +# - parameters in the query are typically extra options like flags or filter options +# - body holds the request data +# -def parse_request_parameters_as( +def parse_request_path_parameters_as( parameters_schema: Type[M], request: web.Request, ) -> M: - """Parses path and query parameters from 'request' and validates against 'parameters_schema' + """Parses path parameters from 'request' and validates against 'parameters_schema' - :raises HTTPBadRequest if validation of parameters or queries fail + :raises HTTPBadRequest if validation of parameters fail """ - try: - params = { - **request.match_info, - **request.query, - } - return parameters_schema.parse_obj(params) + with handle_validation_as_http_error( + error_msg_template="Invalid parameter/s '{failed}' in request path" + ): + data = dict(request.match_info) + return parameters_schema.parse_obj(data) - except ValidationError as err: - raise web.HTTPBadRequest( - **_convert_to_http_error_kwargs( - err, - reason="Invalid parameters {failed} in request", - ) - ) + +def 
parse_request_query_parameters_as( + query_schema: Type[M], + request: web.Request, +) -> M: + """Parses query parameters from 'request' and validates against 'parameters_schema' + + :raises HTTPBadRequest if validation of queries fail + """ + + with handle_validation_as_http_error( + error_msg_template="Invalid parameter/s '{failed}' in request query" + ): + data = dict(request.query) + return query_schema.parse_obj(data) async def parse_request_body_as(model_schema: Type[M], request: web.Request) -> M: @@ -56,30 +79,8 @@ async def parse_request_body_as(model_schema: Type[M], request: web.Request) -> :raises HTTPBadRequest """ - try: + with handle_validation_as_http_error( + error_msg_template="Invalid field/s '{failed}' in request body" + ): body = await request.json() return model_schema.parse_obj(body) - - except ValidationError as err: - raise web.HTTPBadRequest( - **_convert_to_http_error_kwargs( - err, - reason="Invalid {failed} in request body", - ) - ) - - -def parse_request_context_as( - context_model_cls: Type[M], - request: web.Request, -) -> M: - """Parses and validate request context - - :raises ValidationError - """ - app_ctx = dict(request.app) - req_ctx = dict(request) - - assert set(app_ctx.keys()).intersection(req_ctx.keys()) == set() # nosec - context = {**app_ctx, **req_ctx} - return context_model_cls.parse_obj(context) diff --git a/packages/service-library/tests/aiohttp/test_requests_validation.py b/packages/service-library/tests/aiohttp/test_requests_validation.py index e638ed9f96d..6001f9ca4a1 100644 --- a/packages/service-library/tests/aiohttp/test_requests_validation.py +++ b/packages/service-library/tests/aiohttp/test_requests_validation.py @@ -2,27 +2,32 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable -from typing import Any +import json +from typing import Callable from uuid import UUID import pytest from aiohttp import web -from aiohttp.test_utils import make_mocked_request +from aiohttp.test_utils import TestClient from faker import Faker -from jsonschema import ValidationError -from pydantic import BaseModel, Field +from pydantic import BaseModel, Extra, Field from servicelib.aiohttp.requests_validation import ( parse_request_body_as, - parse_request_context_as, - parse_request_parameters_as, + parse_request_path_parameters_as, + parse_request_query_parameters_as, ) -from yarl import URL +from servicelib.json_serialization import json_dumps -# HELPERS +# HELPERS ----------------------------------------------------------- RQT_USERID_KEY = f"{__name__}.user_id" APP_SECRET_KEY = f"{__name__}.secret" +def jsonable_encoder(data): + # q&d replacement for fastapi.encoders.jsonable_encoder + return json.loads(json_dumps(data)) + + class MyRequestContext(BaseModel): user_id: int = Field(alias=RQT_USERID_KEY) secret: str = Field(alias=APP_SECRET_KEY) @@ -32,13 +37,28 @@ def create_fake(cls, faker: Faker): return cls(user_id=faker.pyint(), secret=faker.password()) -class MyRequestParameters(BaseModel): +class MyRequestPathParams(BaseModel): project_uuid: UUID + + class Config: + extra = Extra.forbid + + @classmethod + def create_fake(cls, faker: Faker): + return cls(project_uuid=faker.uuid4()) + + +class MyRequestQueryParams(BaseModel): is_ok: bool = True + label: str + + def as_params(self, **kwargs) -> dict[str, str]: + data = self.dict(**kwargs) + return {k: f"{v}" for k, v in data.items()} @classmethod def create_fake(cls, faker: Faker): - return cls(project_uuid=faker.uuid4(), is_ok=faker.pybool()) + return cls(is_ok=faker.pybool(), 
label=faker.word()) class Sub(BaseModel): @@ -59,87 +79,174 @@ def create_fake(cls, faker: Faker): return cls(x=faker.pyint(), y=faker.pybool(), z=Sub.create_fake(faker)) -def create_fake_request( - app: web.Application, params: dict[str, str], queries: dict[str, Any], body: Any -): - url = URL.build(path="/projects/{project_uuid}/".format(**params), query=queries) - - request = make_mocked_request( - "GET", - f"{url}", - match_info=params, - app=app, - payload=body, - ) - return request +# FIXTURES ---------------------------------- -# TESTS +@pytest.fixture +def client(event_loop, aiohttp_client: Callable, faker: Faker) -> TestClient: + """ + Some app that: + - creates app and request context + - has a handler that parses request params, query and body -async def test_parse_request_as( - app: web.Application, - faker: Faker, -): + """ + + async def _handler(request: web.Request) -> web.Response: + # --------- UNDER TEST ------- + # NOTE: app context does NOT need to be validated everytime! + context = MyRequestContext.parse_obj({**dict(request.app), **dict(request)}) + + path_params = parse_request_path_parameters_as(MyRequestPathParams, request) + query_params = parse_request_query_parameters_as(MyRequestQueryParams, request) + body = await parse_request_body_as(MyBody, request) + # --------------------------- - context = MyRequestContext.create_fake(faker) - params = MyRequestParameters.create_fake(faker) - body = MyBody.create_fake(faker) + return web.json_response( + { + "parameters": path_params.dict(), + "queries": query_params.dict(), + "body": body.dict(), + "context": context.dict(), + }, + dumps=json_dumps, + ) - app = web.Application() - app[APP_SECRET_KEY] = context.secret - app["SKIP_THIS"] = 0 + # --- - request = create_fake_request( - app, - params=params.dict(include={"project_uuid"}), - queries=params.dict(exclude={"project_uuid"}), - body=body.json(), + @web.middleware + async def _middleware(request: web.Request, handler): + # request context + request[RQT_USERID_KEY] = 42 + request["RQT_IGNORE_CONTEXT"] = "not interesting" + resp = await handler(request) + return resp + + app = web.Application( + middlewares=[ + _middleware, + ] ) - request[RQT_USERID_KEY] = context.user_id - request["SKIP_ALSO_THIS"] = 0 - # params - valid_params = parse_request_parameters_as(MyRequestParameters, request) - assert valid_params == params + # app context + app[APP_SECRET_KEY] = faker.password() + app["APP_IGNORE_CONTEXT"] = "not interesting" + + # adds handler + app.add_routes([web.get("/projects/{project_uuid}", _handler)]) + + return event_loop.run_until_complete(aiohttp_client(app)) + - # body - valid_body = await parse_request_body_as(MyBody, request) - assert valid_body == body +@pytest.fixture +def path_params(faker: Faker): + path_params = MyRequestPathParams.create_fake(faker) + return path_params - # context - valid_context = parse_request_context_as(MyRequestContext, request) - assert valid_context == context +@pytest.fixture +def query_params(faker: Faker) -> MyRequestQueryParams: + return MyRequestQueryParams.create_fake(faker) -async def test_parse_request_as_raises_http_error( - app: web.Application, - faker: Faker, + +@pytest.fixture +def body(faker: Faker) -> MyBody: + return MyBody.create_fake(faker) + + +# TESTS ------------------------------------------------------ + + +async def test_parse_request_as( + client: TestClient, + path_params: MyRequestPathParams, + query_params: MyRequestQueryParams, + body: MyBody, ): - body = MyBody.create_fake(faker) + r = await 
client.get( + f"/projects/{path_params.project_uuid}", + params=query_params.as_params(), + json=body.dict(), + ) + assert r.status == web.HTTPOk.status_code, f"{await r.text()}" + + got = await r.json() + + assert got["parameters"] == jsonable_encoder(path_params.dict()) + assert got["queries"] == jsonable_encoder(query_params.dict()) + assert got["body"] == body.dict() + assert got["context"] == { + "secret": client.app[APP_SECRET_KEY], + "user_id": 42, + } + - request = create_fake_request( - app, - params={"project_uuid": "invalid-uuid"}, - queries={}, - body={"wrong": 33}, +async def test_parse_request_with_invalid_path_params( + client: TestClient, + query_params: MyRequestQueryParams, + body: MyBody, +): + + r = await client.get( + "/projects/invalid-uuid", + params=query_params.as_params(), + json=body.dict(), ) + assert r.status == web.HTTPBadRequest.status_code, f"{await r.text()}" - # params - with pytest.raises(web.HTTPBadRequest) as exc_info: - parse_request_parameters_as(MyRequestParameters, request) + errors = await r.json() + assert errors == { + "error": { + "msg": "Invalid parameter/s 'project_uuid' in request path", + "details": [{"loc": "project_uuid", "msg": "value is not a valid uuid"}], + } + } - bad_request_exc = exc_info.value - assert "project_uuid" in bad_request_exc.reason - # body - with pytest.raises(web.HTTPBadRequest) as exc_info: - await parse_request_body_as(MyBody, request) +async def test_parse_request_with_invalid_query_params( + client: TestClient, + path_params: MyRequestPathParams, + body: MyBody, +): + + r = await client.get( + f"/projects/{path_params.project_uuid}", + params={}, + json=body.dict(), + ) + assert r.status == web.HTTPBadRequest.status_code, f"{await r.text()}" - bad_request_exc = exc_info.value - assert "wrong" in bad_request_exc.reason + errors = await r.json() + assert errors == { + "error": { + "msg": "Invalid parameter/s 'label' in request query", + "details": [{"loc": "label", "msg": "field required"}], + } + } - # context - with pytest.raises(ValidationError): - parse_request_context_as(MyRequestContext, request) + +async def test_parse_request_with_invalid_body( + client: TestClient, + path_params: MyRequestPathParams, + query_params: MyRequestQueryParams, +): + + r = await client.get( + f"/projects/{path_params.project_uuid}", + params=query_params.as_params(), + json={"invalid": "body"}, + ) + assert r.status == web.HTTPBadRequest.status_code, f"{await r.text()}" + + errors = await r.json() + + assert errors == { + "error": { + "msg": "Invalid field/s 'x, z' in request body", + "details": [ + {"loc": "x", "msg": "field required"}, + {"loc": "z", "msg": "field required"}, + ], + } + } From d1f879b6835fe8972f6ccf650f91a373558a6134 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Mon, 16 May 2022 15:30:54 +0200 Subject: [PATCH 18/19] users validation in project-handlers-crud --- .../projects/projects_handlers_crud.py | 247 ++++++++---------- 1 file changed, 106 insertions(+), 141 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py index 0d620391c47..421219366e1 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers_crud.py @@ -1,5 +1,6 @@ """ Handlers for STANDARD methods on /projects colletions +Standard methods or 
CRUD that states for Create+Read(Get&List)+Update+Delete """ import asyncio @@ -9,16 +10,18 @@ from typing import Any, Coroutine, Dict, List, Optional, Set from uuid import UUID -import orjson from aiohttp import web from jsonschema import ValidationError as JsonSchemaValidationError from models_library.basic_types import UUIDStr -from models_library.projects import ProjectID -from models_library.projects_access import AccessRights, GroupIDStr from models_library.projects_state import ProjectStatus -from models_library.rest_pagination import Page +from models_library.rest_pagination import Page, PageMetaInfoLimitOffset from models_library.rest_pagination_utils import paginate_data -from pydantic import BaseModel, EmailStr, Extra, Field, HttpUrl, ValidationError +from models_library.users import UserID +from pydantic import BaseModel, Field +from servicelib.aiohttp.requests_validation import ( + parse_request_path_parameters_as, + parse_request_query_parameters_as, +) from servicelib.json_serialization import json_dumps from servicelib.utils import logged_gather from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB @@ -33,7 +36,6 @@ from ..security_decorators import permission_required from ..storage_api import copy_data_folders_from_project from ..users_api import get_user_name -from ..utils import snake_to_camel from . import projects_api from .project_models import ProjectDict, ProjectTypeAPI from .projects_db import APP_PROJECT_DBAPI, ProjectDBAPI @@ -65,61 +67,29 @@ routes = web.RouteTableDef() -# -# Standard methods or CRUD that states for Create+Read(Get&List)+Update+Delete -# - Get https://google.aip.dev/131 -# - List https://google.aip.dev/132 -# - Create https://google.aip.dev/133 -# - Update https://google.aip.dev/134 -# - Delete https://google.aip.dev/135 -# - - -# API MODELS ------------------------ -class _ProjectCreateQuery(BaseModel): - template_uuid: Optional[UUIDStr] = None - as_template: Optional[UUIDStr] = None - copy_data: bool = True - hidden: bool = False - - -class ProjectCreate(BaseModel): - """ - -> POST /projects (ProjectCreate) - - - resource ID (i.e. project's uuid) is defined in the *backend* on creation - """ +class BaseRequestContext(BaseModel): + user_id: UserID = Field(..., alias=RQT_USERID_KEY) + product_name: str = Field(..., alias=RQ_PRODUCT_KEY) - name: str - description: str - thumbnail: Optional[HttpUrl] = None - # TODO: why these are necessary? - prj_owner: EmailStr = Field(..., description="user's email of owner") - access_rights: Dict[GroupIDStr, AccessRights] = Field(...) +class ProjectPathParams(BaseModel): + project_uuid: UUID = Field(..., alias="project_id") class Config: - extra = Extra.ignore # error tolerant - alias_generator = snake_to_camel - json_loads = orjson.loads - json_dumps = json_dumps - + allow_population_by_field_name = True -class ProjectGet(BaseModel): - name: str - description: str - thumbnail: HttpUrl = "" - prj_owner: EmailStr = Field(..., description="user's email of owner") - access_rights: Dict[GroupIDStr, AccessRights] = Field(...) 
- class Config: - extra = Extra.allow - alias_generator = snake_to_camel - json_dumps = json_dumps +# +# - Create https://google.aip.dev/133 +# -# HANDLERS ------------------------ +class _ProjectCreateParams(BaseModel): + template_uuid: Optional[UUIDStr] = None + as_template: Optional[UUIDStr] = None + copy_data: bool = True + hidden: bool = False @routes.post(f"/{VTAG}/projects") @@ -138,19 +108,9 @@ async def create_projects(request: web.Request): """ # SEE https://google.aip.dev/133 - # FIXME: too-many-branches - # FIXME: too-many-statements - - # request context params - user_id: int = request[RQT_USERID_KEY] db: ProjectDBAPI = request.app[APP_PROJECT_DBAPI] - - # query params - try: - q = _ProjectCreateQuery.parse_obj(*request.query) - except ValidationError as err: - # TODO: impove error. which parameter failed and why? - raise web.HTTPBadRequest(reason=f"Invalid query parameters: {err}") + c = BaseRequestContext.parse_obj(request) + q = parse_request_query_parameters_as(_ProjectCreateParams, request) new_project = {} try: @@ -163,7 +123,7 @@ async def create_projects(request: web.Request): source_project = await projects_api.get_project_for_user( request.app, project_uuid=q.as_template, - user_id=user_id, + user_id=c.user_id, include_templates=False, ) elif q.template_uuid: # create from template @@ -187,7 +147,7 @@ async def create_projects(request: web.Request): q.hidden = q.copy_data clone_data_coro = ( copy_data_folders_from_project( - request.app, source_project, new_project, nodes_map, user_id + request.app, source_project, new_project, nodes_map, c.user_id ) if q.copy_data else None @@ -212,7 +172,7 @@ async def create_projects(request: web.Request): # update metadata (uuid, timestamps, ownership) and save new_project = await db.add_project( new_project, - user_id, + c.user_id, force_as_template=q.as_template is not None, hidden=q.hidden, ) @@ -226,8 +186,8 @@ async def create_projects(request: web.Request): request.app, source_project["uuid"], ProjectStatus.CLONING, - user_id, - await get_user_name(request.app, user_id), + c.user_id, + await get_user_name(request.app, c.user_id), ): await clone_data_coro @@ -245,12 +205,12 @@ async def create_projects(request: web.Request): # This is a new project and every new graph needs to be reflected in the pipeline tables await director_v2_api.create_or_update_pipeline( - request.app, user_id, new_project["uuid"] + request.app, c.user_id, new_project["uuid"] ) # Appends state new_project = await projects_api.add_project_states_for_user( - user_id=user_id, + user_id=c.user_id, project=new_project, is_template=q.as_template is not None, app=request.app, @@ -264,10 +224,10 @@ async def create_projects(request: web.Request): raise web.HTTPUnauthorized from exc except asyncio.CancelledError: log.warning( - "cancelled creation of project for user '%s', cleaning up", f"{user_id=}" + "cancelled creation of project for user '%s', cleaning up", f"{c.user_id=}" ) await projects_api.submit_delete_project_task( - request.app, new_project["uuid"], user_id + request.app, new_project["uuid"], c.user_id ) raise else: @@ -277,23 +237,24 @@ async def create_projects(request: web.Request): ) +# +# - List https://google.aip.dev/132 +# + + +class _ProjectListParams(PageMetaInfoLimitOffset): + project_type: ProjectTypeAPI = Field(ProjectTypeAPI.all, alias="type") + show_hidden: bool + + @routes.get(f"/{VTAG}/projects") @login_required @permission_required("project.read") async def list_projects(request: web.Request): - # TODO: implement all query parameters 
as - # in https://www.ibm.com/support/knowledgecenter/en/SSCRJU_3.2.0/com.ibm.swg.im.infosphere.streams.rest.api.doc/doc/restapis-queryparms-list.html - from servicelib.aiohttp.rest_utils import extract_and_validate - - user_id, product_name = request[RQT_USERID_KEY], request[RQ_PRODUCT_KEY] - _, query, _ = await extract_and_validate(request) - - project_type = ProjectTypeAPI(query["type"]) - offset = query["offset"] - limit = query["limit"] - show_hidden = query["show_hidden"] - db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] + db: ProjectDBAPI = request.app[APP_PROJECT_DBAPI] + c = BaseRequestContext.parse_obj(request) + q = parse_request_query_parameters_as(_ProjectListParams, request) async def set_all_project_states( projects: List[Dict[str, Any]], project_types: List[ProjectTypeDB] @@ -301,7 +262,7 @@ async def set_all_project_states( await logged_gather( *[ projects_api.add_project_states_for_user( - user_id=user_id, + user_id=c.user_id, project=prj, is_template=prj_type == ProjectTypeDB.TEMPLATE, app=request.app, @@ -315,16 +276,16 @@ async def set_all_project_states( user_available_services: List[ Dict ] = await catalog.get_services_for_user_in_product( - request.app, user_id, product_name, only_key_versions=True + request.app, c.user_id, c.product_name, only_key_versions=True ) projects, project_types, total_number_projects = await db.load_projects( - user_id=user_id, - filter_by_project_type=ProjectTypeAPI.to_project_type_db(project_type), + user_id=c.user_id, + filter_by_project_type=ProjectTypeAPI.to_project_type_db(q.project_type), filter_by_services=user_available_services, - offset=offset, - limit=limit, - include_hidden=show_hidden, + offset=q.offset, + limit=q.limit, + include_hidden=q.show_hidden, ) await set_all_project_states(projects, project_types) page = Page[ProjectDict].parse_obj( @@ -332,8 +293,8 @@ async def set_all_project_states( chunk=projects, request_url=request.url, total=total_number_projects, - limit=limit, - offset=offset, + limit=q.limit, + offset=q.offset, ) ) return web.Response( @@ -342,29 +303,32 @@ async def set_all_project_states( ) +# +# - Get https://google.aip.dev/131 +# + + @routes.get(f"/{VTAG}/projects/{{project_uuid}}") @login_required @permission_required("project.read") async def get_project(request: web.Request): """Returns all projects accessible to a user (not necesarly owned)""" # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! - user_id, product_name = request[RQT_USERID_KEY], request[RQ_PRODUCT_KEY] - try: - project_uuid = request.match_info["project_id"] - except KeyError as err: - raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err + + c = BaseRequestContext.parse_obj(request) + p = parse_request_path_parameters_as(ProjectPathParams, request) user_available_services: List[ Dict ] = await catalog.get_services_for_user_in_product( - request.app, user_id, product_name, only_key_versions=True + request.app, c.user_id, c.product_name, only_key_versions=True ) try: project = await projects_api.get_project_for_user( request.app, - project_uuid=project_uuid, - user_id=user_id, + project_uuid=p.project_uuid, + user_id=c.user_id, include_templates=True, include_state=True, ) @@ -378,7 +342,7 @@ async def get_project(request: web.Request): # TODO: lack of permissions should be notified with https://httpstatuses.com/403 web.HTTPForbidden raise web.HTTPNotFound( reason=( - f"Project '{project_uuid}' uses unavailable services. 
Please ask " + f"Project '{p.project_uuid}' uses unavailable services. Please ask " f"for permission for the following services {formatted_services}" ) ) @@ -390,10 +354,15 @@ async def get_project(request: web.Request): except ProjectInvalidRightsError as exc: raise web.HTTPForbidden( - reason=f"You do not have sufficient rights to read project {project_uuid}" + reason=f"You do not have sufficient rights to read project {p.project_uuid}" ) from exc except ProjectNotFoundError as exc: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from exc + raise web.HTTPNotFound(reason=f"Project {p.project_uuid} not found") from exc + + +# +# - Update https://google.aip.dev/134 +# @routes.put(f"/{VTAG}/projects/{{project_uuid}}") @@ -415,32 +384,25 @@ async def replace_project(request: web.Request): :raises web.HTTPNotFound: cannot find project id in repository """ - user_id: int = request[RQT_USERID_KEY] + db: ProjectDBAPI = request.app[APP_PROJECT_DBAPI] + c = BaseRequestContext.parse_obj(request) + p = parse_request_path_parameters_as(ProjectPathParams, request) + try: - project_uuid = ProjectID(request.match_info["project_id"]) new_project = await request.json() - # Prune state field (just in case) new_project.pop("state", None) - except AttributeError as err: - # NOTE: if new_project is not a dict, .pop will raise this error - raise web.HTTPBadRequest( - reason="Invalid request payload, expected a project model" - ) from err - except KeyError as err: - raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err except json.JSONDecodeError as exc: raise web.HTTPBadRequest(reason="Invalid request body") from exc - db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI] await check_permission( request, "project.update | project.workbench.node.inputs.update", context={ "dbapi": db, - "project_id": f"{project_uuid}", - "user_id": user_id, + "project_id": f"{p.project_uuid}", + "user_id": c.user_id, "new_data": new_project, }, ) @@ -450,8 +412,8 @@ async def replace_project(request: web.Request): current_project = await projects_api.get_project_for_user( request.app, - project_uuid=f"{project_uuid}", - user_id=user_id, + project_uuid=f"{p.project_uuid}", + user_id=c.user_id, include_templates=True, include_state=True, ) @@ -460,7 +422,7 @@ async def replace_project(request: web.Request): await check_permission(request, "project.access_rights.update") if await director_v2_api.is_pipeline_running( - request.app, user_id, project_uuid + request.app, c.user_id, p.project_uuid ): if any_node_inputs_changed(new_project, current_project): @@ -483,19 +445,19 @@ async def replace_project(request: web.Request): # and resubmit the request (front-end will show a pop-up with message below) # raise web.HTTPConflict( - reason=f"Project {project_uuid} cannot be modified while pipeline is still running." + reason=f"Project {p.project_uuid} cannot be modified while pipeline is still running." 
) new_project = await db.replace_user_project( - new_project, user_id, f"{project_uuid}", include_templates=True + new_project, c.user_id, f"{p.project_uuid}", include_templates=True ) - await director_v2_api.projects_networks_update(request.app, project_uuid) + await director_v2_api.projects_networks_update(request.app, p.project_uuid) await director_v2_api.create_or_update_pipeline( - request.app, user_id, project_uuid + request.app, c.user_id, p.project_uuid ) # Appends state new_project = await projects_api.add_project_states_for_user( - user_id=user_id, + user_id=c.user_id, project=new_project, is_template=False, app=request.app, @@ -517,47 +479,50 @@ async def replace_project(request: web.Request): return web.json_response({"data": new_project}, dumps=json_dumps) +# +# - Delete https://google.aip.dev/135 +# + + @routes.delete(f"/{VTAG}/projects/{{project_uuid}}") @login_required @permission_required("project.delete") async def delete_project(request: web.Request): - # first check if the project exists - user_id: int = request[RQT_USERID_KEY] - try: - project_uuid = request.match_info["project_id"] - except KeyError as err: - raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err + c = BaseRequestContext.parse_obj(request) + p = parse_request_path_parameters_as(ProjectPathParams, request) try: await projects_api.get_project_for_user( request.app, - project_uuid=project_uuid, - user_id=user_id, + project_uuid=f"{p.project_uuid}", + user_id=c.user_id, include_templates=True, ) project_users: Set[int] = set() - with managed_resource(user_id, None, request.app) as rt: + with managed_resource(c.user_id, None, request.app) as rt: project_users = { user_session.user_id for user_session in await rt.find_users_of_resource( - PROJECT_ID_KEY, project_uuid + PROJECT_ID_KEY, f"{p.project_uuid}" ) } # that project is still in use - if user_id in project_users: + if c.user_id in project_users: raise web.HTTPForbidden( - reason="Project is still open in another tab/browser. It cannot be deleted until it is closed." + reason="Project is still open in another tab/browser." + "It cannot be deleted until it is closed." ) if project_users: other_user_names = { await get_user_name(request.app, uid) for uid in project_users } raise web.HTTPForbidden( - reason=f"Project is open by {other_user_names}. It cannot be deleted until the project is closed." + reason=f"Project is open by {other_user_names}. " + "It cannot be deleted until the project is closed." 
) await projects_api.submit_delete_project_task( - request.app, ProjectID(project_uuid), user_id + request.app, p.project_uuid, c.user_id ) except ProjectInvalidRightsError as err: @@ -565,6 +530,6 @@ async def delete_project(request: web.Request): reason="You do not have sufficient rights to delete this project" ) from err except ProjectNotFoundError as err: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from err + raise web.HTTPNotFound(reason=f"Project {p.project_uuid} not found") from err raise web.HTTPNoContent(content_type="application/json") From 79319d0b2158ec81ae0615f35ae543893b380724 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Mon, 16 May 2022 15:40:19 +0200 Subject: [PATCH 19/19] uses validation in rest project handlers --- .../projects/projects_handlers.py | 132 ++++++++++++------ 1 file changed, 90 insertions(+), 42 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py index bc26da782e9..f7455a99933 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py @@ -10,40 +10,46 @@ from aiohttp import web from models_library.projects_state import ProjectState +from pydantic import BaseModel, Field +from servicelib.aiohttp.requests_validation import ( + parse_request_path_parameters_as, + parse_request_query_parameters_as, +) from servicelib.aiohttp.web_exceptions_extension import HTTPLocked from servicelib.json_serialization import json_dumps from .._meta import api_version_prefix as VTAG from ..director_v2_core import DirectorServiceError -from ..login.decorators import RQT_USERID_KEY, login_required +from ..login.decorators import login_required from ..resource_manager.websocket_manager import PROJECT_ID_KEY, managed_resource from ..security_decorators import permission_required -from . import projects_api +from . 
import _create, projects_api from .projects_exceptions import ProjectNotFoundError -from .projects_handlers_crud import routes +from .projects_handlers_crud import BaseRequestContext, ProjectPathParams, routes log = logging.getLogger(__name__) + # -# Singleton per-session resources https://google.aip.dev/156 +# singleton: Active project +# - Singleton per-session resources https://google.aip.dev/156 # +class _ProjectActiveParams(BaseModel): + client_session_id: str + + @routes.get(f"/{VTAG}/projects/active") @login_required @permission_required("project.read") async def get_active_project(request: web.Request) -> web.Response: - user_id: int = request[RQT_USERID_KEY] - - try: - client_session_id = request.query["client_session_id"] + p = parse_request_query_parameters_as(_ProjectActiveParams, request) - except KeyError as err: - raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err try: project = None user_active_projects = [] - with managed_resource(user_id, client_session_id, request.app) as rt: + with managed_resource(p.user_id, p.client_session_id, request.app) as rt: # get user's projects user_active_projects = await rt.find(PROJECT_ID_KEY) if user_active_projects: @@ -51,7 +57,7 @@ async def get_active_project(request: web.Request) -> web.Response: project = await projects_api.get_project_for_user( request.app, project_uuid=user_active_projects[0], - user_id=user_id, + user_id=p.user_id, include_templates=True, include_state=True, ) @@ -63,7 +69,41 @@ async def get_active_project(request: web.Request) -> web.Response: # -# Custom methods https://google.aip.dev/136 +# clone: custom methods https://google.aip.dev/136 +# + + +class _ProjectCloneParams(BaseModel): + copy_data: bool = True + as_template: bool = Field( + default=False, description="Enforces clone to be a template" + ) + + +@routes.post(f"/{VTAG}/projects/{{project_uuid}}:clone") +@login_required +@permission_required("project.create") +@permission_required("services.pipeline.*") # due to update_pipeline_db +async def clone_project(request: web.Request): + c = BaseRequestContext.parse_obj(request) + p = parse_request_path_parameters_as(ProjectPathParams, request) + q = parse_request_query_parameters_as(_ProjectCloneParams, request) + + project = await _create.clone_project( + request.app, + c.user_id, + source_project_id=p.project_uuid, + copy_data=q.copy_data, + as_template=q.as_template, + ) + + # TODO: see create! 
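As an illustration of how the new `:clone` custom method above is meant to be consumed (the snippet is not part of the patch; the base URL, the `v0` version tag, the project id and an already-authenticated session are assumptions for the example), a client call could look roughly like this:

    # Hedged sketch of a client call to the new clone endpoint; not part of the patch.
    # Base URL, version tag, project id and the authenticated session are assumptions.
    import asyncio

    import aiohttp


    async def clone_example() -> None:
        project_uuid = "11111111-2222-3333-4444-555555555555"  # hypothetical project id
        url = f"http://localhost:8080/v0/projects/{project_uuid}:clone"
        async with aiohttp.ClientSession() as session:
            # the query parameters map onto _ProjectCloneParams (copy_data, as_template)
            async with session.post(
                url, params={"copy_data": "true", "as_template": "false"}
            ) as resp:
                envelope = await resp.json()
                cloned = envelope["data"]  # the handler wraps the new project in {"data": ...}
                print(cloned.get("uuid"))


    # asyncio.run(clone_example())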
+ + return web.json_response({"data": project}) + + +# +# open project: custom methods https://google.aip.dev/136 # @@ -71,38 +111,40 @@ async def get_active_project(request: web.Request) -> web.Response: @login_required @permission_required("project.open") async def open_project(request: web.Request) -> web.Response: - user_id: int = request[RQT_USERID_KEY] + c = BaseRequestContext.parse_obj(request) + p = parse_request_path_parameters_as(ProjectPathParams, request) + try: - project_uuid = request.match_info["project_id"] client_session_id = await request.json() - except KeyError as err: - raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err + except json.JSONDecodeError as exc: raise web.HTTPBadRequest(reason="Invalid request body") from exc try: project = await projects_api.get_project_for_user( request.app, - project_uuid=project_uuid, - user_id=user_id, + project_uuid=f"{p.project_uuid}", + user_id=c.user_id, include_templates=False, include_state=True, ) if not await projects_api.try_open_project_for_user( - user_id, - project_uuid=project_uuid, + p.user_id, + project_uuid=f"{p.project_uuid}", client_session_id=client_session_id, app=request.app, ): raise HTTPLocked(reason="Project is locked, try later") # user id opened project uuid - await projects_api.start_project_interactive_services(request, project, user_id) + await projects_api.start_project_interactive_services( + request, project, c.user_id + ) # notify users that project is now opened project = await projects_api.add_project_states_for_user( - user_id=user_id, + user_id=c.user_id, project=project, is_template=False, app=request.app, @@ -113,13 +155,13 @@ async def open_project(request: web.Request) -> web.Response: return web.json_response({"data": project}, dumps=json_dumps) except ProjectNotFoundError as exc: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from exc + raise web.HTTPNotFound(reason=f"Project {p.project_uuid} not found") from exc except DirectorServiceError as exc: # there was an issue while accessing the director-v2/director-v0 # ensure the project is closed again await projects_api.try_close_project_for_user( - user_id=user_id, - project_uuid=project_uuid, + user_id=c.user_id, + project_uuid=f"{p.project_uuid}", client_session_id=client_session_id, app=request.app, ) @@ -128,17 +170,22 @@ async def open_project(request: web.Request) -> web.Response: ) from exc +# +# close project: custom methods https://google.aip.dev/136 +# + + @routes.post(f"/{VTAG}/projects/{{project_uuid}}:close") @login_required @permission_required("project.close") async def close_project(request: web.Request) -> web.Response: - user_id: int = request[RQT_USERID_KEY] + + c = BaseRequestContext.parse_obj(request) + p = parse_request_path_parameters_as(ProjectPathParams, request) + try: - project_uuid = request.match_info["project_id"] client_session_id = await request.json() - except KeyError as err: - raise web.HTTPBadRequest(reason=f"Invalid request parameter {err}") from err except json.JSONDecodeError as exc: raise web.HTTPBadRequest(reason="Invalid request body") from exc @@ -146,35 +193,36 @@ async def close_project(request: web.Request) -> web.Response: # ensure the project exists await projects_api.get_project_for_user( request.app, - project_uuid=project_uuid, - user_id=user_id, + project_uuid=f"{p.project_uuid}", + user_id=c.user_id, include_templates=False, include_state=False, ) await projects_api.try_close_project_for_user( - user_id, project_uuid, client_session_id, 
request.app + c.user_id, f"{p.project_uuid}", client_session_id, request.app ) raise web.HTTPNoContent(content_type="application/json") except ProjectNotFoundError as exc: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from exc + raise web.HTTPNotFound(reason=f"Project {p.project_uuid} not found") from exc + + +# +# project's state sub-resource +# @routes.get(f"/{VTAG}/projects/{{project_uuid}}/state") @login_required @permission_required("project.read") async def state_project(request: web.Request) -> web.Response: - from servicelib.aiohttp.rest_utils import extract_and_validate - - user_id: int = request[RQT_USERID_KEY] - - path, _, _ = await extract_and_validate(request) - project_uuid = path["project_id"] + c = BaseRequestContext.parse_obj(request) + p = parse_request_path_parameters_as(ProjectPathParams, request) # check that project exists and queries state validated_project = await projects_api.get_project_for_user( request.app, - project_uuid=project_uuid, - user_id=user_id, + project_uuid=f"{p.project_uuid}", + user_id=c.user_id, include_templates=True, include_state=True, )
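The common thread in the handlers touched by this patch is that ad-hoc `request.match_info` / `request.query` look-ups are replaced by pydantic models parsed through `parse_request_path_parameters_as` / `parse_request_query_parameters_as` plus a small request-context model. A minimal sketch of that pattern follows; it is a simplified stand-in, not the servicelib implementation, and the model name, helper signature and 422 status code are assumptions.

    # Simplified sketch of the validation pattern used in these handlers.
    # Not the servicelib implementation; names, signature and status code are assumptions.
    from uuid import UUID

    from aiohttp import web
    from pydantic import BaseModel, ValidationError


    class _PathParamsSketch(BaseModel):
        project_uuid: UUID


    def parse_path_params(model_cls, request: web.Request):
        # collect the raw path parameters and validate them in one step
        try:
            return model_cls.parse_obj(dict(request.match_info))
        except ValidationError as err:
            raise web.HTTPUnprocessableEntity(
                reason=f"Invalid path parameters: {err}"
            ) from err


    async def example_handler(request: web.Request) -> web.Response:
        p = parse_path_params(_PathParamsSketch, request)
        # p.project_uuid is now a validated UUID instead of a raw string
        return web.json_response({"data": {"project_uuid": str(p.project_uuid)}})

With a helper like this in place, a malformed `{project_uuid}` in the URL is rejected before the handler body runs, which is why the explicit `KeyError` branches disappear from the diffs above.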