From befe688ee2f886b0b98b9d3e288ddf8b80eba1d9 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 22 Jun 2022 10:41:50 +0200 Subject: [PATCH 01/13] holds task var to app's lifespan --- .../src/simcore_service_webserver/garbage_collector_task.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_task.py b/services/web/server/src/simcore_service_webserver/garbage_collector_task.py index 4ce9f4876c1..e987ea05127 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_task.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_task.py @@ -17,6 +17,7 @@ GC_TASK_NAME = f"background-task.{__name__}.collect_garbage_periodically" GC_TASK_CONFIG = f"{GC_TASK_NAME}.config" +GC_TASK = f"{GC_TASK_NAME}.task" async def run_background_task(app: web.Application): @@ -29,6 +30,8 @@ async def run_background_task(app: web.Application): gc_bg_task = asyncio.create_task( collect_garbage_periodically(app), name=GC_TASK_NAME ) + # attaches variable to the app's lifetime + app[GC_TASK] = gc_bg_task # FIXME: added this config to overcome the state in which the # task cancelation is ignored and the exceptions enter in a loop From 5d6b69feba93d511586e1bed442f5661fe8b7d8e Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 22 Jun 2022 11:00:22 +0200 Subject: [PATCH 02/13] logs GC cycle --- .../garbage_collector_core.py | 22 +++++++++---------- .../garbage_collector_task.py | 16 +++++++++----- 2 files changed, 21 insertions(+), 17 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py index 66b90c9fd1b..75f0152a7f7 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py +++ 
b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py @@ -5,7 +5,7 @@ import asyncio import logging from itertools import chain -from typing import Any, Dict, List, Optional, Set, Tuple +from typing import Any, Optional import asyncpg.exceptions from aiohttp import web @@ -61,8 +61,6 @@ async def collect_garbage(app: web.Application): The field `garbage_collection_interval_seconds` defines the interval at which this function will be called. """ - logger.info("Collecting garbage...") - registry: RedisResourceRegistry = get_registry(app) # Removes disconnected user resources @@ -248,7 +246,7 @@ async def remove_users_manually_marked_as_guests( skip_users.add(int(entry["user_id"])) # Prevent creating this list if a guest user - guest_users: List[Tuple[int, str]] = await get_guest_user_ids_and_names(app) + guest_users: list[tuple[int, str]] = await get_guest_user_ids_and_names(app) for guest_user_id, guest_user_name in guest_users: # Prevents removing GUEST users that were automatically (NOT manually) created @@ -291,8 +289,8 @@ async def remove_users_manually_marked_as_guests( async def _remove_single_orphaned_service( app: web.Application, - interactive_service: Dict[str, Any], - currently_opened_projects_node_ids: Set[str], + interactive_service: dict[str, Any], + currently_opened_projects_node_ids: set[str], ) -> None: service_host = interactive_service["service_host"] # if not present in DB or not part of currently opened projects, can be removed @@ -378,7 +376,7 @@ async def remove_orphaned_services( """ logger.debug("Starting orphaned services removal...") - currently_opened_projects_node_ids: Set[str] = set() + currently_opened_projects_node_ids: set[str] = set() alive_keys, _ = await registry.get_all_resource_keys() for alive_key in alive_keys: resources = await registry.get_resources(alive_key) @@ -389,11 +387,11 @@ async def remove_orphaned_services( node_ids = await get_workbench_node_ids_from_project_uuid(app, project_uuid) 
currently_opened_projects_node_ids.update(node_ids) - running_interactive_services: List[Dict[str, Any]] = [] + running_interactive_services: list[dict[str, Any]] = [] try: running_interactive_services = await director_v2_api.get_services(app) except director_v2_api.DirectorServiceError: - logger.debug(("Could not fetch running_interactive_services")) + logger.debug("Could not fetch running_interactive_services") logger.info( "Currently running services %s", @@ -434,7 +432,7 @@ async def _delete_all_projects_for_user(app: web.Application, user_id: int) -> N """ # recover user's primary_gid try: - project_owner: Dict = await get_user(app=app, user_id=user_id) + project_owner: dict = await get_user(app=app, user_id=user_id) except users_exceptions.UserNotFoundError: logger.warning( "Could not recover user data for user '%s', stopping removal of projects!", @@ -456,11 +454,11 @@ async def _delete_all_projects_for_user(app: web.Application, user_id: int) -> N f"{user_project_uuids=}", ) - delete_tasks: List[asyncio.Task] = [] + delete_tasks: list[asyncio.Task] = [] for project_uuid in user_project_uuids: try: - project: Dict = await get_project_for_user( + project: dict = await get_project_for_user( app=app, project_uuid=project_uuid, user_id=user_id, diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_task.py b/services/web/server/src/simcore_service_webserver/garbage_collector_task.py index e987ea05127..89deb8670d8 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_task.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_task.py @@ -62,34 +62,40 @@ async def run_background_task(app: web.Application): async def collect_garbage_periodically(app: web.Application): - + LOG_PREFIX = "Garbage Collector cycle " settings: GarbageCollectorSettings = get_plugin_settings(app) while True: - logger.info("Starting garbage collector...") try: interval = settings.GARBAGE_COLLECTOR_INTERVAL_S while 
True: + logger.info(LOG_PREFIX + "starting ...") + await collect_garbage(app) if app[GC_TASK_CONFIG].get("force_stop", False): - raise Exception("Forced to stop garbage collection") + raise RuntimeError("Forced to stop garbage collection") + logger.info(LOG_PREFIX + "completed: pausing for %ss", f"{interval=}") await asyncio.sleep(interval) except asyncio.CancelledError: - logger.info("Garbage collection task was cancelled, it will not restart!") + logger.info( + LOG_PREFIX + "stopped:" + "Garbage collection task was cancelled, it will not restart!" + ) # do not catch Cancellation errors raise except Exception: # pylint: disable=broad-except logger.warning( + LOG_PREFIX + "stopped:" "There was an error during garbage collection, restarting...", exc_info=True, ) if app[GC_TASK_CONFIG].get("force_stop", False): - logger.warning("Forced to stop garbage collection") + logger.warning(LOG_PREFIX + ": Forced to stop garbage collection") break # will wait 5 seconds to recover before restarting to avoid restart loops From f95940eecd86a557e549f2007205425a2d2dfff7 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 22 Jun 2022 12:53:14 +0200 Subject: [PATCH 03/13] Fixes wrong name argument while raising DynamicSidecarError --- .../modules/dynamic_sidecar/docker_api.py | 20 +++++++------------ 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api.py index 0d3baaac57a..86f41785ce0 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api.py @@ -72,10 +72,8 @@ async def get_swarm_network(dynamic_sidecar_settings: DynamicSidecarSettings) -> ] if not networks or len(networks) > 1: raise DynamicSidecarError( - ( 
- f"Swarm network name (searching for '*{network_name}*') is not configured." - f"Found following networks: {networks}" - ) + f"Swarm network name (searching for '*{network_name}*') is not configured." + f"Found following networks: {networks}" ) return networks[0] @@ -116,7 +114,7 @@ async def create_service_and_get_id(create_service_data: AioDockerServiceSpec) - if "ID" not in service_start_result: raise DynamicSidecarError( - "Error while starting service: {}".format(str(service_start_result)) + f"Error while starting service: {str(service_start_result)}" ) return service_start_result["ID"] @@ -162,10 +160,8 @@ async def _sleep_or_error(started: float, task: dict): > dynamic_sidecar_settings.DYNAMIC_SIDECAR_TIMEOUT_FETCH_DYNAMIC_SIDECAR_NODE_ID ): raise DynamicSidecarError( - msg=( - "Timed out while searching for an assigned NodeID for " - f"service_id={service_id}. Last task inspect result: {task}" - ) + "Timed out while searching for an assigned NodeID for " + f"service_id={service_id}. Last task inspect result: {task}" ) async with docker_client() as client: @@ -219,10 +215,8 @@ async def get_service_placement( if "NodeID" not in task: raise DynamicSidecarError( - msg=( - f"Could not find an assigned NodeID for service_id={service_id}. " - f"Last task inspect result: {task}" - ) + f"Could not find an assigned NodeID for service_id={service_id}. 
" + f"Last task inspect result: {task}" ) return task["NodeID"] From 4261f02716e836be5d7057b18fc1be9a25fe0fd4 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 22 Jun 2022 13:44:10 +0200 Subject: [PATCH 04/13] improves logging in GC --- .../garbage_collector_core.py | 85 +++++++++++-------- 1 file changed, 50 insertions(+), 35 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py index 75f0152a7f7..76c6f445247 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py @@ -4,8 +4,9 @@ import asyncio import logging +from contextlib import contextmanager from itertools import chain -from typing import Any, Optional +from typing import Any, Callable, Optional import asyncpg.exceptions from aiohttp import web @@ -40,6 +41,18 @@ logger = logging.getLogger(__name__) +@contextmanager +def log_context(log: Callable, message: str): + try: + log("%s [STARTING]", message) + + yield + + log("%s [DONE-SUCCEED]", message) + except Exception as e: # pylint: disable=broad-except + log("%s [DONE-FAILED with %s]", message, type(e)) + + async def collect_garbage(app: web.Application): """ Garbage collection has the task of removing trash (i.e. unused resources) from the system. 
The trash @@ -63,25 +76,26 @@ async def collect_garbage(app: web.Application): """ registry: RedisResourceRegistry = get_registry(app) - # Removes disconnected user resources - # Triggers signal to close possible pending opened projects - # Removes disconnected GUEST users after they finished their sessions - await remove_disconnected_user_resources(registry, app) + with log_context(logger.info, "Step 1: Removes disconnected user resources"): + # Triggers signal to close possible pending opened projects + # Removes disconnected GUEST users after they finished their sessions + await remove_disconnected_user_resources(registry, app) - # Users manually marked for removal: - # if a user was manually marked as GUEST it needs to be - # removed together with all the associated projects - await remove_users_manually_marked_as_guests(registry, app) + with log_context(logger.info, "Step 2: Removes users manually marked for removal"): + # if a user was manually marked as GUEST it needs to be + # removed together with all the associated projects + await remove_users_manually_marked_as_guests(registry, app) - # For various reasons, some services remain pending after - # the projects are closed or the user was disconencted. - # This will close and remove all these services from - # the cluster, thus freeing important resources. + with log_context(logger.info, "Step 3: Removes orphaned services"): + # For various reasons, some services remain pending after + # the projects are closed or the user was disconencted. + # This will close and remove all these services from + # the cluster, thus freeing important resources. - # Temporary disabling GC to until the dynamic service - # safe function is invoked by the GC. This will avoid - # data loss for current users. - await remove_orphaned_services(registry, app) + # Temporary disabling GC to until the dynamic service + # safe function is invoked by the GC. This will avoid + # data loss for current users. 
+ await remove_orphaned_services(registry, app) async def remove_disconnected_user_resources( @@ -123,9 +137,9 @@ async def remove_disconnected_user_resources( if await lock_manager.lock( GUEST_USER_RC_LOCK_FORMAT.format(user_id=user_id) ).locked(): - logger.debug( - "Skipping garbage-collecting user '%d' since it is still locked", - user_id, + logger.info( + "Skipping garbage-collecting %s since it is still locked", + f"{user_id=}", ) continue @@ -136,10 +150,9 @@ async def remove_disconnected_user_resources( continue # (1,2) CAREFULLY releasing every resource acquired by the expired key - logger.debug( - "Key '%s' expired. Cleaning the following resources: '%s'", - dead_key, - dead_key_resources, + logger.info( + "%s expired. Checking resources to cleanup", + f"{dead_key=}", ) for resource_name, resource_value in dead_key_resources.items(): @@ -173,10 +186,10 @@ async def remove_disconnected_user_resources( # (1) releasing acquired resources logger.info( - "(1) Releasing resource %s:%s acquired by expired key %s", - resource_name, - resource_value, - dead_key, + "(1) Releasing resource %s:%s acquired by expired %s", + f"{resource_name=}", + f"{resource_value=}", + f"{dead_key!r}", ) if resource_name == "project_id": @@ -206,6 +219,7 @@ async def remove_disconnected_user_resources( # ONLY GUESTS: if this user was a GUEST also remove it from the database # with the only associated project owned + # FIXME: if a guest can share, it will become permanent user! 
await remove_guest_user_with_all_its_resources( app=app, user_id=int(dead_key["user_id"]), @@ -213,8 +227,9 @@ async def remove_disconnected_user_resources( # (2) remove resource field in collected keys since (1) is completed logger.info( - "(2) Removing resource %s field entry from registry keys: %s", - resource_name, + "(2) Removing field for released resource %s:%s from registry keys: %s", + f"{resource_name=}", + f"{resource_value=}", keys_to_update, ) on_released_tasks = [ @@ -298,11 +313,11 @@ async def _remove_single_orphaned_service( # if the node does not exist in any project in the db # they can be safely remove it without saving any state if not await is_node_id_present_in_any_project_workbench(app, service_uuid): - message = ( + logger.info( "Will remove orphaned service without saving state since " - f"this service is not part of any project {service_host}" + "this service is not part of any project %s", + f"{service_host=}", ) - logger.info(message) try: await director_v2_api.stop_service(app, service_uuid, save_state=False) except (ServiceNotFoundError, DirectorException) as err: @@ -329,8 +344,8 @@ async def _remove_single_orphaned_service( # # a service state might be one of [pending, pulling, starting, running, complete, failed] logger.warning( - "Skipping %s since image is in %s", - service_host, + "Skipping %s since service state is %s", + f"{service_host=}", interactive_service.get("service_state", "unknown"), ) return From fb54e2e15ac5aa2b1717a3da00a40c99f2270c2b Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 22 Jun 2022 13:44:31 +0200 Subject: [PATCH 05/13] improves registry annotations and doc --- .../resource_manager/registry.py | 48 ++++++++++++------- 1 file changed, 32 insertions(+), 16 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/registry.py b/services/web/server/src/simcore_service_webserver/resource_manager/registry.py index 
073919d9cd2..61048ef1fbd 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/registry.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/registry.py @@ -14,18 +14,29 @@ """ import logging -from typing import Dict, List, Tuple +from typing import TypedDict, Union import redis.asyncio as aioredis from aiohttp import web +from models_library.basic_types import UUIDStr from ..redis import get_redis_client from ._constants import APP_CLIENT_SOCKET_REGISTRY_KEY log = logging.getLogger(__name__) -RESOURCE_SUFFIX = "resources" ALIVE_SUFFIX = "alive" +RESOURCE_SUFFIX = "resources" + + +class RegistryKeyDict(TypedDict): + user_id: Union[str, int] + client_session_id: str + + +class ResourcesValueDict(TypedDict, total=False): + project_id: UUIDStr + socket_id: str class RedisResourceRegistry: @@ -33,6 +44,10 @@ class RedisResourceRegistry: redis structure is following Redis Hash: key=user_id:client_session_id values={server_id socket_id project_id} + + Example: + Key: user_id=1:client_session_id=7f40353b-db02-4474-a44d-23ce6a6e428c:alive = 1 + Key: user_id=1:client_session_id=7f40353b-db02-4474-a44d-23ce6a6e428c:resources = {project_id: ... 
, socket_id: ...} """ def __init__(self, app: web.Application): @@ -43,19 +58,19 @@ def app(self) -> web.Application: return self._app @classmethod - def _hash_key(cls, key: Dict[str, str]) -> str: + def _hash_key(cls, key: RegistryKeyDict) -> str: hash_key = ":".join(f"{item[0]}={item[1]}" for item in key.items()) return hash_key @classmethod - def _decode_hash_key(cls, hash_key: str) -> Dict[str, str]: + def _decode_hash_key(cls, hash_key: str) -> RegistryKeyDict: tmp_key = ( hash_key[: -len(f":{RESOURCE_SUFFIX}")] if hash_key.endswith(f":{RESOURCE_SUFFIX}") else hash_key[: -len(f":{ALIVE_SUFFIX}")] ) key = dict(x.split("=") for x in tmp_key.split(":")) - return key + return RegistryKeyDict(**key) @property def client(self) -> aioredis.Redis: @@ -63,23 +78,24 @@ def client(self) -> aioredis.Redis: return client async def set_resource( - self, key: Dict[str, str], resource: Tuple[str, str] + self, key: RegistryKeyDict, resource: tuple[str, str] ) -> None: hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}" field, value = resource await self.client.hset(hash_key, mapping={field: value}) - async def get_resources(self, key: Dict[str, str]) -> Dict[str, str]: + async def get_resources(self, key: RegistryKeyDict) -> ResourcesValueDict: hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}" - return await self.client.hgetall(hash_key) + fields = await self.client.hgetall(hash_key) + return ResourcesValueDict(**fields) - async def remove_resource(self, key: Dict[str, str], resource_name: str) -> None: + async def remove_resource(self, key: RegistryKeyDict, resource_name: str) -> None: hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}" await self.client.hdel(hash_key, resource_name) async def find_resources( - self, key: Dict[str, str], resource_name: str - ) -> List[str]: + self, key: RegistryKeyDict, resource_name: str + ) -> list[str]: resources = [] # the key might only be partialy complete partial_hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}" @@ -88,7 
+104,7 @@ async def find_resources( resources.append(await self.client.hget(scanned_key, resource_name)) return resources - async def find_keys(self, resource: Tuple[str, str]) -> List[Dict[str, str]]: + async def find_keys(self, resource: tuple[str, str]) -> list[dict[str, str]]: keys = [] if not resource: return keys @@ -100,17 +116,17 @@ async def find_keys(self, resource: Tuple[str, str]) -> List[Dict[str, str]]: keys.append(self._decode_hash_key(hash_key)) return keys - async def set_key_alive(self, key: Dict[str, str], timeout: int) -> None: + async def set_key_alive(self, key: RegistryKeyDict, timeout: int) -> None: # setting the timeout to always expire, timeout > 0 timeout = int(max(1, timeout)) hash_key = f"{self._hash_key(key)}:{ALIVE_SUFFIX}" await self.client.set(hash_key, 1, ex=timeout) - async def is_key_alive(self, key: Dict[str, str]) -> bool: + async def is_key_alive(self, key: RegistryKeyDict) -> bool: hash_key = f"{self._hash_key(key)}:{ALIVE_SUFFIX}" return await self.client.exists(hash_key) > 0 - async def remove_key(self, key: Dict[str, str]) -> None: + async def remove_key(self, key: RegistryKeyDict) -> None: await self.client.delete( f"{self._hash_key(key)}:{RESOURCE_SUFFIX}", f"{self._hash_key(key)}:{ALIVE_SUFFIX}", @@ -118,7 +134,7 @@ async def remove_key(self, key: Dict[str, str]) -> None: async def get_all_resource_keys( self, - ) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]: + ) -> tuple[list[RegistryKeyDict], list[RegistryKeyDict]]: alive_keys = [ self._decode_hash_key(hash_key) async for hash_key in self.client.scan_iter(match=f"*:{ALIVE_SUFFIX}") From 4fb034f727c6a2149ecf7f35d9a184d684eed3ff Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 22 Jun 2022 13:48:09 +0200 Subject: [PATCH 06/13] fixes log --- .../src/simcore_service_webserver/garbage_collector_core.py | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py index 76c6f445247..2d13e0bde70 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py @@ -49,8 +49,10 @@ def log_context(log: Callable, message: str): yield log("%s [DONE-SUCCEED]", message) + except Exception as e: # pylint: disable=broad-except log("%s [DONE-FAILED with %s]", message, type(e)) + raise async def collect_garbage(app: web.Application): From dd1d83dfb8fe0a2429fe7a754f316b27cf283806 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 22 Jun 2022 14:37:22 +0200 Subject: [PATCH 07/13] fixes log utils format --- .../src/servicelib/logging_utils.py | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index fb234653a37..04809c4bd6d 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -9,7 +9,7 @@ from asyncio import iscoroutinefunction from contextlib import contextmanager from inspect import getframeinfo, stack -from typing import Callable, Dict, Optional +from typing import Callable, Optional log = logging.getLogger(__name__) @@ -63,7 +63,8 @@ def format(self, record): return super().format(record) -DEFAULT_FORMATTING = "%(levelname)s: [%(asctime)s/%(processName)s] [%(name)s:%(funcName)s(%(lineno)d)] %(message)s" +# SEE https://docs.python.org/3/library/logging.html#logrecord-attributes +DEFAULT_FORMATTING = "%(levelname)s: [%(asctime)s/%(processName)s] [%(name)s:%(funcName)s(%(lineno)d)] - %(message)s" def config_all_loggers(): @@ -78,25 +79,19 @@ def config_all_loggers(): def set_logging_handler( logger: 
logging.Logger, - formatter_base=None, - formatting: Optional[str] = None, + formatter_base: Optional[type[logging.Formatter]] = None, + fmt: str = DEFAULT_FORMATTING, ) -> None: - if not formatting: - formatting = DEFAULT_FORMATTING if not formatter_base: formatter_base = CustomFormatter for handler in logger.handlers: - handler.setFormatter( - formatter_base( - "%(levelname)s: %(name)s:%(funcName)s(%(lineno)s) - %(message)s" - ) - ) + handler.setFormatter(formatter_base(fmt)) def _log_arguments( logger_obj: logging.Logger, level: int, func: Callable, *args, **kwargs -) -> Dict[str, str]: +) -> dict[str, str]: args_passed_in_function = [repr(a) for a in args] kwargs_passed_in_function = [f"{k}={v!r}" for k, v in kwargs.items()] From f5b881e3f3d3ddae4c3d690a5ef29c80222636c1 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 22 Jun 2022 14:39:00 +0200 Subject: [PATCH 08/13] cleanup log msg and linter --- .../garbage_collector_core.py | 7 +++++-- .../garbage_collector_task.py | 12 +++++------- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py index 2d13e0bde70..7cacfbc686e 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py @@ -43,15 +43,18 @@ @contextmanager def log_context(log: Callable, message: str): + """Logs entering/existing context as start/done and informs if error""" + # NOTE: could have been done with a LoggerAdapter but wonder if it would work + # with a global logger and asyncio context switches try: log("%s [STARTING]", message) yield - log("%s [DONE-SUCCEED]", message) + log("%s [DONE w/ SUCCESS]", message) except Exception as e: # pylint: disable=broad-except - log("%s [DONE-FAILED with %s]", message, type(e)) + log("%s 
[DONE w/ ERROR %s]", message, type(e)) raise diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_task.py b/services/web/server/src/simcore_service_webserver/garbage_collector_task.py index 89deb8670d8..8d7b4987f3e 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_task.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_task.py @@ -62,40 +62,38 @@ async def run_background_task(app: web.Application): async def collect_garbage_periodically(app: web.Application): - LOG_PREFIX = "Garbage Collector cycle " settings: GarbageCollectorSettings = get_plugin_settings(app) while True: try: interval = settings.GARBAGE_COLLECTOR_INTERVAL_S while True: - logger.info(LOG_PREFIX + "starting ...") + logger.info("Starting ...") await collect_garbage(app) if app[GC_TASK_CONFIG].get("force_stop", False): raise RuntimeError("Forced to stop garbage collection") - logger.info(LOG_PREFIX + "completed: pausing for %ss", f"{interval=}") + logger.info("Completed: pausing for %ss", f"{interval=}") await asyncio.sleep(interval) except asyncio.CancelledError: logger.info( - LOG_PREFIX + "stopped:" - "Garbage collection task was cancelled, it will not restart!" + "Stopped:" "Garbage collection task was cancelled, it will not restart!" 
) # do not catch Cancellation errors raise except Exception: # pylint: disable=broad-except logger.warning( - LOG_PREFIX + "stopped:" + "Stopped:" "There was an error during garbage collection, restarting...", exc_info=True, ) if app[GC_TASK_CONFIG].get("force_stop", False): - logger.warning(LOG_PREFIX + ": Forced to stop garbage collection") + logger.warning("Forced to stop garbage collection") break # will wait 5 seconds to recover before restarting to avoid restart loops From bc7bedabe0e2ba2100bd4432c414d21a7166f33f Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 22 Jun 2022 15:02:22 +0200 Subject: [PATCH 09/13] improves log --- .../garbage_collector_core.py | 26 ++++------------- .../garbage_collector_task.py | 18 ++++++------ .../garbage_collector_utils.py | 29 +++++++++++++++---- 3 files changed, 39 insertions(+), 34 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py index 7cacfbc686e..454ce158119 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py @@ -4,9 +4,8 @@ import asyncio import logging -from contextlib import contextmanager from itertools import chain -from typing import Any, Callable, Optional +from typing import Any, Optional import asyncpg.exceptions from aiohttp import web @@ -18,7 +17,11 @@ from . 
import director_v2_api, users_exceptions from .director.director_exceptions import DirectorException, ServiceNotFoundError from .garbage_collector_settings import GUEST_USER_RC_LOCK_FORMAT -from .garbage_collector_utils import get_new_project_owner_gid, replace_current_owner +from .garbage_collector_utils import ( + get_new_project_owner_gid, + log_context, + replace_current_owner, +) from .projects.projects_api import ( get_project_for_user, get_workbench_node_ids_from_project_uuid, @@ -41,23 +44,6 @@ logger = logging.getLogger(__name__) -@contextmanager -def log_context(log: Callable, message: str): - """Logs entering/existing context as start/done and informs if error""" - # NOTE: could have been done with a LoggerAdapter but wonder if it would work - # with a global logger and asyncio context switches - try: - log("%s [STARTING]", message) - - yield - - log("%s [DONE w/ SUCCESS]", message) - - except Exception as e: # pylint: disable=broad-except - log("%s [DONE w/ ERROR %s]", message, type(e)) - raise - - async def collect_garbage(app: web.Application): """ Garbage collection has the task of removing trash (i.e. unused resources) from the system. 
The trash diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_task.py b/services/web/server/src/simcore_service_webserver/garbage_collector_task.py index 8d7b4987f3e..48d5a0176a4 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_task.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_task.py @@ -11,6 +11,7 @@ from .garbage_collector_core import collect_garbage from .garbage_collector_settings import GarbageCollectorSettings, get_plugin_settings +from .garbage_collector_utils import log_context logger = logging.getLogger(__name__) @@ -63,29 +64,28 @@ async def run_background_task(app: web.Application): async def collect_garbage_periodically(app: web.Application): settings: GarbageCollectorSettings = get_plugin_settings(app) + interval = settings.GARBAGE_COLLECTOR_INTERVAL_S while True: try: - interval = settings.GARBAGE_COLLECTOR_INTERVAL_S while True: - logger.info("Starting ...") + with log_context(logger.info, "Garbage collect cycle"): + await collect_garbage(app) - await collect_garbage(app) + if app[GC_TASK_CONFIG].get("force_stop", False): + raise RuntimeError("Forced to stop garbage collection") - if app[GC_TASK_CONFIG].get("force_stop", False): - raise RuntimeError("Forced to stop garbage collection") - - logger.info("Completed: pausing for %ss", f"{interval=}") + logger.info("Garbage collect cycle pauses %ss", interval) await asyncio.sleep(interval) - except asyncio.CancelledError: + except asyncio.CancelledError: # EXIT logger.info( "Stopped:" "Garbage collection task was cancelled, it will not restart!" 
) # do not catch Cancellation errors raise - except Exception: # pylint: disable=broad-except + except Exception: # RESILIENT restart # pylint: disable=broad-except logger.warning( "Stopped:" "There was an error during garbage collection, restarting...", diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_utils.py b/services/web/server/src/simcore_service_webserver/garbage_collector_utils.py index 9e124e4754d..b03645e5dc1 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_utils.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_utils.py @@ -1,5 +1,6 @@ import logging -from typing import Dict, Optional, Set +from contextlib import contextmanager +from typing import Callable, Optional import asyncpg.exceptions from aiohttp import web @@ -20,7 +21,7 @@ async def _fetch_new_project_owner_from_groups( - app: web.Application, standard_groups: Dict, user_id: UserID + app: web.Application, standard_groups: dict, user_id: UserID ) -> Optional[UserID]: """Iterate over all the users in a group and if the users exists in the db return its gid @@ -52,7 +53,7 @@ async def get_new_project_owner_gid( project_uuid: str, user_id: UserID, user_primary_gid: GroupID, - project: Dict, + project: dict, ) -> Optional[GroupID]: """Goes through the access rights and tries to find a new suitable owner. The first viable user is selected as a new owner. 
@@ -62,7 +63,7 @@ async def get_new_project_owner_gid( access_rights = project["accessRights"] # A Set[str] is prefered over Set[int] because access_writes # is a Dict with only key,valus in {str, None} - other_users_access_rights: Set[str] = set(access_rights.keys()) - { + other_users_access_rights: set[str] = set(access_rights.keys()) - { f"{user_primary_gid}" } logger.debug( @@ -123,7 +124,7 @@ async def replace_current_owner( project_uuid: str, user_primary_gid: GroupID, new_project_owner_gid: GroupID, - project: Dict, + project: dict, ) -> None: try: new_project_owner_id = await get_user_id_from_gid( @@ -173,3 +174,21 @@ async def replace_current_owner( "Could not remove old owner and replaced it with user %s", new_project_owner_id, ) + + +@contextmanager +def log_context(log: Callable, message: str): + """Logs entering/exiting context as start/done and informs if exited with error""" + # NOTE: could have been done with a LoggerAdapter but wonder if it would work + # with a global logger and asyncio context switches + # PC: I still do not find useful enough to move it to e.g.
servicelib.logging_utils + try: + log("%s [STARTING]", message) + + yield + + log("%s [DONE w/ SUCCESS]", message) + + except Exception as e: # pylint: disable=broad-except + log("%s [DONE w/ ERROR %s]", message, type(e)) + raise From 795deeee13833df0a4a43c54fd92369d3eb940a0 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 22 Jun 2022 15:10:23 +0200 Subject: [PATCH 10/13] types --- .../garbage_collector_core.py | 1 + .../resource_manager/registry.py | 30 +++++++++++-------- 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py index 454ce158119..94314cd01af 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_core.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_core.py @@ -147,6 +147,7 @@ async def remove_disconnected_user_resources( ) for resource_name, resource_value in dead_key_resources.items(): + resource_value = f"{resource_value}" # Releasing a resource consists of two steps # - (1) release actual resource (e.g. 
stop service, close project, deallocate memory, etc) diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/registry.py b/services/web/server/src/simcore_service_webserver/resource_manager/registry.py index 61048ef1fbd..0201ce8b2f9 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/registry.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/registry.py @@ -29,7 +29,9 @@ RESOURCE_SUFFIX = "resources" -class RegistryKeyDict(TypedDict): +class RegistryKeyPrefixDict(TypedDict): + """Parts of the redis key w/o suffix""" + user_id: Union[str, int] client_session_id: str @@ -58,19 +60,19 @@ def app(self) -> web.Application: return self._app @classmethod - def _hash_key(cls, key: RegistryKeyDict) -> str: + def _hash_key(cls, key: RegistryKeyPrefixDict) -> str: hash_key = ":".join(f"{item[0]}={item[1]}" for item in key.items()) return hash_key @classmethod - def _decode_hash_key(cls, hash_key: str) -> RegistryKeyDict: + def _decode_hash_key(cls, hash_key: str) -> RegistryKeyPrefixDict: tmp_key = ( hash_key[: -len(f":{RESOURCE_SUFFIX}")] if hash_key.endswith(f":{RESOURCE_SUFFIX}") else hash_key[: -len(f":{ALIVE_SUFFIX}")] ) key = dict(x.split("=") for x in tmp_key.split(":")) - return RegistryKeyDict(**key) + return RegistryKeyPrefixDict(**key) @property def client(self) -> aioredis.Redis: @@ -78,23 +80,25 @@ def client(self) -> aioredis.Redis: return client async def set_resource( - self, key: RegistryKeyDict, resource: tuple[str, str] + self, key: RegistryKeyPrefixDict, resource: tuple[str, str] ) -> None: hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}" field, value = resource await self.client.hset(hash_key, mapping={field: value}) - async def get_resources(self, key: RegistryKeyDict) -> ResourcesValueDict: + async def get_resources(self, key: RegistryKeyPrefixDict) -> ResourcesValueDict: hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}" fields = await self.client.hgetall(hash_key) 
return ResourcesValueDict(**fields) - async def remove_resource(self, key: RegistryKeyDict, resource_name: str) -> None: + async def remove_resource( + self, key: RegistryKeyPrefixDict, resource_name: str + ) -> None: hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}" await self.client.hdel(hash_key, resource_name) async def find_resources( - self, key: RegistryKeyDict, resource_name: str + self, key: RegistryKeyPrefixDict, resource_name: str ) -> list[str]: resources = [] # the key might only be partialy complete @@ -104,7 +108,7 @@ async def find_resources( resources.append(await self.client.hget(scanned_key, resource_name)) return resources - async def find_keys(self, resource: tuple[str, str]) -> list[dict[str, str]]: + async def find_keys(self, resource: tuple[str, str]) -> list[RegistryKeyPrefixDict]: keys = [] if not resource: return keys @@ -116,17 +120,17 @@ async def find_keys(self, resource: tuple[str, str]) -> list[dict[str, str]]: keys.append(self._decode_hash_key(hash_key)) return keys - async def set_key_alive(self, key: RegistryKeyDict, timeout: int) -> None: + async def set_key_alive(self, key: RegistryKeyPrefixDict, timeout: int) -> None: # setting the timeout to always expire, timeout > 0 timeout = int(max(1, timeout)) hash_key = f"{self._hash_key(key)}:{ALIVE_SUFFIX}" await self.client.set(hash_key, 1, ex=timeout) - async def is_key_alive(self, key: RegistryKeyDict) -> bool: + async def is_key_alive(self, key: RegistryKeyPrefixDict) -> bool: hash_key = f"{self._hash_key(key)}:{ALIVE_SUFFIX}" return await self.client.exists(hash_key) > 0 - async def remove_key(self, key: RegistryKeyDict) -> None: + async def remove_key(self, key: RegistryKeyPrefixDict) -> None: await self.client.delete( f"{self._hash_key(key)}:{RESOURCE_SUFFIX}", f"{self._hash_key(key)}:{ALIVE_SUFFIX}", @@ -134,7 +138,7 @@ async def remove_key(self, key: RegistryKeyDict) -> None: async def get_all_resource_keys( self, - ) -> tuple[list[RegistryKeyDict], list[RegistryKeyDict]]: 
+ ) -> tuple[list[RegistryKeyPrefixDict], list[RegistryKeyPrefixDict]]: alive_keys = [ self._decode_hash_key(hash_key) async for hash_key in self.client.scan_iter(match=f"*:{ALIVE_SUFFIX}") From 7f45d7bd76a47920656ee5ccf41b2881d0548db7 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 22 Jun 2022 15:12:21 +0200 Subject: [PATCH 11/13] code smell --- .../src/simcore_service_webserver/garbage_collector_task.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector_task.py b/services/web/server/src/simcore_service_webserver/garbage_collector_task.py index 48d5a0176a4..5cd0094b1c0 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector_task.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector_task.py @@ -80,15 +80,14 @@ async def collect_garbage_periodically(app: web.Application): except asyncio.CancelledError: # EXIT logger.info( - "Stopped:" "Garbage collection task was cancelled, it will not restart!" + "Stopped: Garbage collection task was cancelled, it will not restart!" 
) # do not catch Cancellation errors raise except Exception: # RESILIENT restart # pylint: disable=broad-except logger.warning( - "Stopped:" - "There was an error during garbage collection, restarting...", + "Stopped: There was an error during garbage collection, restarting...", exc_info=True, ) From 45d9cca24d02a8ae202c27fedb372c350a3188bd Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 22 Jun 2022 15:15:05 +0200 Subject: [PATCH 12/13] renames log module to avoid name conflicts --- .../src/simcore_service_webserver/cli.py | 5 ++-- .../{log.py => logs.py} | 0 .../01/test_studies_dispatcher_handlers.py | 6 ++-- .../01/test_studies_dispatcher_projects.py | 2 +- .../test_studies_dispatcher_studies_access.py | 28 +++++++++---------- .../with_dbs/03/meta_modeling/conftest.py | 6 ++-- .../with_dbs/03/version_control/conftest.py | 11 ++++---- 7 files changed, 28 insertions(+), 30 deletions(-) rename services/web/server/src/simcore_service_webserver/{log.py => logs.py} (100%) diff --git a/services/web/server/src/simcore_service_webserver/cli.py b/services/web/server/src/simcore_service_webserver/cli.py index 6a44bfa1195..1c3784d1fc2 100644 --- a/services/web/server/src/simcore_service_webserver/cli.py +++ b/services/web/server/src/simcore_service_webserver/cli.py @@ -16,7 +16,6 @@ import logging import os -from typing import Dict, Tuple import typer from aiohttp import web @@ -25,7 +24,7 @@ from .application import create_application, run_service from .application_settings import ApplicationSettings from .application_settings_utils import convert_to_app_config -from .log import setup_logging +from .logs import setup_logging # ptsvd cause issues with ProcessPoolExecutor # SEE: https://github.com/microsoft/ptvsd/issues/1443 @@ -39,7 +38,7 @@ def _setup_app_from_settings( settings: ApplicationSettings, -) -> Tuple[web.Application, Dict]: +) -> tuple[web.Application, dict]: # NOTE: keeping an equivalent config allows 
us # to keep some of the code from the previous diff --git a/services/web/server/src/simcore_service_webserver/log.py b/services/web/server/src/simcore_service_webserver/logs.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/log.py rename to services/web/server/src/simcore_service_webserver/logs.py diff --git a/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_handlers.py b/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_handlers.py index ffbfd18a1b1..9636e2531cb 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_handlers.py +++ b/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_handlers.py @@ -7,7 +7,7 @@ import urllib.parse from copy import deepcopy from pprint import pprint -from typing import Iterator, Tuple +from typing import Iterator import pytest import sqlalchemy as sa @@ -17,7 +17,7 @@ from pytest_simcore.helpers.utils_assert import assert_status from pytest_simcore.helpers.utils_login import UserRole from simcore_service_webserver import catalog -from simcore_service_webserver.log import setup_logging +from simcore_service_webserver.logs import setup_logging from simcore_service_webserver.studies_dispatcher._core import ViewerInfo from sqlalchemy.sql import text from yarl import URL @@ -395,7 +395,7 @@ async def _get_user_projects(): assert mock_client_director_v2_func.called -def assert_error_in_fragment(resp: ClientResponse) -> Tuple[str, int]: +def assert_error_in_fragment(resp: ClientResponse) -> tuple[str, int]: # Expects fragment to indicate client where to find newly created project unquoted_fragment = urllib.parse.unquote_plus(resp.real_url.fragment) match = re.match(r"/error\?(.+)", unquoted_fragment) diff --git a/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_projects.py b/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_projects.py index 597fae06617..23ac18deda1 100644 --- 
a/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_projects.py +++ b/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_projects.py @@ -14,7 +14,7 @@ from pytest_simcore.helpers.utils_projects import delete_all_projects from pytest_simcore.helpers.utils_services import list_fake_file_consumers from simcore_service_webserver.groups_api import auto_add_user_to_groups -from simcore_service_webserver.log import setup_logging +from simcore_service_webserver.logs import setup_logging from simcore_service_webserver.projects.projects_api import get_project_for_user from simcore_service_webserver.studies_dispatcher._projects import ( UserInfo, diff --git a/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_studies_access.py b/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_studies_access.py index d9aa2dfffc9..af07fc6866e 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_studies_access.py +++ b/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_studies_access.py @@ -12,7 +12,7 @@ from copy import deepcopy from pathlib import Path from pprint import pprint -from typing import AsyncIterator, Callable, Dict +from typing import AsyncIterator, Callable import pytest from aiohttp import ClientResponse, ClientSession, web @@ -26,7 +26,7 @@ from servicelib.aiohttp.rest_responses import unwrap_envelope from settings_library.redis import RedisSettings from simcore_service_webserver import catalog -from simcore_service_webserver.log import setup_logging +from simcore_service_webserver.logs import setup_logging from simcore_service_webserver.projects.projects_api import submit_delete_project_task from simcore_service_webserver.users_api import delete_user, get_user_role @@ -92,7 +92,7 @@ def app_cfg( @pytest.fixture async def published_project( client, fake_project, tests_data_dir: Path -) -> AsyncIterator[Dict]: +) -> AsyncIterator[dict]: project_data = 
deepcopy(fake_project) project_data["name"] = "Published project" project_data["uuid"] = SHARED_STUDY_UUID @@ -143,20 +143,18 @@ async def _get_user_projects(client): return projects -def _assert_same_projects(got: Dict, expected: Dict): +def _assert_same_projects(got: dict, expected: dict): # TODO: validate using api/specs/webserver/v0/components/schemas/project-v0.0.1.json # TODO: validate workbench! - exclude = set( - [ - "creationDate", - "lastChangeDate", - "prjOwner", - "uuid", - "workbench", - "accessRights", - "ui", - ] - ) + exclude = { + "creationDate", + "lastChangeDate", + "prjOwner", + "uuid", + "workbench", + "accessRights", + "ui", + } for key in expected.keys(): if key not in exclude: assert got[key] == expected[key], "Failed in %s" % key diff --git a/services/web/server/tests/unit/with_dbs/03/meta_modeling/conftest.py b/services/web/server/tests/unit/with_dbs/03/meta_modeling/conftest.py index 03ee7a9ef10..8c771873184 100644 --- a/services/web/server/tests/unit/with_dbs/03/meta_modeling/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/meta_modeling/conftest.py @@ -4,11 +4,11 @@ import logging from copy import deepcopy -from typing import Any, Dict +from typing import Any import pytest from simcore_postgres_database.models.users import UserRole -from simcore_service_webserver.log import setup_logging +from simcore_service_webserver.logs import setup_logging # HELPERS @@ -23,7 +23,7 @@ def user_role() -> UserRole: @pytest.fixture -def app_cfg(default_app_cfg, unused_tcp_port_factory, monkeypatch) -> Dict[str, Any]: +def app_cfg(default_app_cfg, unused_tcp_port_factory, monkeypatch) -> dict[str, Any]: """App's configuration used for every test in this module NOTE: Overrides services/web/server/tests/unit/with_dbs/conftest.py::app_cfg to influence app setup diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py b/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py index 
dd602e52839..235d370d31c 100644 --- a/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py @@ -5,7 +5,7 @@ import logging from copy import deepcopy from pathlib import Path -from typing import Any, AsyncIterator, Awaitable, Callable, Dict +from typing import Any, AsyncIterator, Awaitable, Callable from unittest import mock from uuid import UUID @@ -26,10 +26,11 @@ from simcore_service_webserver._meta import API_VTAG as VX from simcore_service_webserver.db import APP_DB_ENGINE_KEY from simcore_service_webserver.db_models import UserRole -from simcore_service_webserver.log import setup_logging -from tenacity import AsyncRetrying, stop_after_delay +from simcore_service_webserver.logs import setup_logging +from tenacity._asyncio import AsyncRetrying +from tenacity.stop import stop_after_delay -ProjectDict = Dict[str, Any] +ProjectDict = dict[str, Any] # HELPERS @@ -77,7 +78,7 @@ async def mocked_get_services_for_user(*args, **kwargs): @pytest.fixture def app_cfg( default_app_cfg, unused_tcp_port_factory, catalog_subsystem_mock, monkeypatch -) -> Dict[str, Any]: +) -> dict[str, Any]: """App's configuration used for every test in this module NOTE: Overrides services/web/server/tests/unit/with_dbs/conftest.py::app_cfg to influence app setup From f7953961aff64fbe96b36c68fe8c0939aabf9998 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Wed, 22 Jun 2022 15:21:28 +0200 Subject: [PATCH 13/13] Revert "renames log module to avoid name conflicts" This reverts commit 45d9cca24d02a8ae202c27fedb372c350a3188bd. 
--- .../src/simcore_service_webserver/cli.py | 5 ++-- .../{logs.py => log.py} | 0 .../01/test_studies_dispatcher_handlers.py | 6 ++-- .../01/test_studies_dispatcher_projects.py | 2 +- .../test_studies_dispatcher_studies_access.py | 28 ++++++++++--------- .../with_dbs/03/meta_modeling/conftest.py | 6 ++-- .../with_dbs/03/version_control/conftest.py | 11 ++++---- 7 files changed, 30 insertions(+), 28 deletions(-) rename services/web/server/src/simcore_service_webserver/{logs.py => log.py} (100%) diff --git a/services/web/server/src/simcore_service_webserver/cli.py b/services/web/server/src/simcore_service_webserver/cli.py index 1c3784d1fc2..6a44bfa1195 100644 --- a/services/web/server/src/simcore_service_webserver/cli.py +++ b/services/web/server/src/simcore_service_webserver/cli.py @@ -16,6 +16,7 @@ import logging import os +from typing import Dict, Tuple import typer from aiohttp import web @@ -24,7 +25,7 @@ from .application import create_application, run_service from .application_settings import ApplicationSettings from .application_settings_utils import convert_to_app_config -from .logs import setup_logging +from .log import setup_logging # ptsvd cause issues with ProcessPoolExecutor # SEE: https://github.com/microsoft/ptvsd/issues/1443 @@ -38,7 +39,7 @@ def _setup_app_from_settings( settings: ApplicationSettings, -) -> tuple[web.Application, dict]: +) -> Tuple[web.Application, Dict]: # NOTE: keeping an equivalent config allows us # to keep some of the code from the previous diff --git a/services/web/server/src/simcore_service_webserver/logs.py b/services/web/server/src/simcore_service_webserver/log.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/logs.py rename to services/web/server/src/simcore_service_webserver/log.py diff --git a/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_handlers.py b/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_handlers.py index 
9636e2531cb..ffbfd18a1b1 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_handlers.py +++ b/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_handlers.py @@ -7,7 +7,7 @@ import urllib.parse from copy import deepcopy from pprint import pprint -from typing import Iterator +from typing import Iterator, Tuple import pytest import sqlalchemy as sa @@ -17,7 +17,7 @@ from pytest_simcore.helpers.utils_assert import assert_status from pytest_simcore.helpers.utils_login import UserRole from simcore_service_webserver import catalog -from simcore_service_webserver.logs import setup_logging +from simcore_service_webserver.log import setup_logging from simcore_service_webserver.studies_dispatcher._core import ViewerInfo from sqlalchemy.sql import text from yarl import URL @@ -395,7 +395,7 @@ async def _get_user_projects(): assert mock_client_director_v2_func.called -def assert_error_in_fragment(resp: ClientResponse) -> tuple[str, int]: +def assert_error_in_fragment(resp: ClientResponse) -> Tuple[str, int]: # Expects fragment to indicate client where to find newly created project unquoted_fragment = urllib.parse.unquote_plus(resp.real_url.fragment) match = re.match(r"/error\?(.+)", unquoted_fragment) diff --git a/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_projects.py b/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_projects.py index 23ac18deda1..597fae06617 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_projects.py +++ b/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_projects.py @@ -14,7 +14,7 @@ from pytest_simcore.helpers.utils_projects import delete_all_projects from pytest_simcore.helpers.utils_services import list_fake_file_consumers from simcore_service_webserver.groups_api import auto_add_user_to_groups -from simcore_service_webserver.logs import setup_logging +from simcore_service_webserver.log import setup_logging from 
simcore_service_webserver.projects.projects_api import get_project_for_user from simcore_service_webserver.studies_dispatcher._projects import ( UserInfo, diff --git a/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_studies_access.py b/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_studies_access.py index af07fc6866e..d9aa2dfffc9 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_studies_access.py +++ b/services/web/server/tests/unit/with_dbs/01/test_studies_dispatcher_studies_access.py @@ -12,7 +12,7 @@ from copy import deepcopy from pathlib import Path from pprint import pprint -from typing import AsyncIterator, Callable +from typing import AsyncIterator, Callable, Dict import pytest from aiohttp import ClientResponse, ClientSession, web @@ -26,7 +26,7 @@ from servicelib.aiohttp.rest_responses import unwrap_envelope from settings_library.redis import RedisSettings from simcore_service_webserver import catalog -from simcore_service_webserver.logs import setup_logging +from simcore_service_webserver.log import setup_logging from simcore_service_webserver.projects.projects_api import submit_delete_project_task from simcore_service_webserver.users_api import delete_user, get_user_role @@ -92,7 +92,7 @@ def app_cfg( @pytest.fixture async def published_project( client, fake_project, tests_data_dir: Path -) -> AsyncIterator[dict]: +) -> AsyncIterator[Dict]: project_data = deepcopy(fake_project) project_data["name"] = "Published project" project_data["uuid"] = SHARED_STUDY_UUID @@ -143,18 +143,20 @@ async def _get_user_projects(client): return projects -def _assert_same_projects(got: dict, expected: dict): +def _assert_same_projects(got: Dict, expected: Dict): # TODO: validate using api/specs/webserver/v0/components/schemas/project-v0.0.1.json # TODO: validate workbench! 
- exclude = { - "creationDate", - "lastChangeDate", - "prjOwner", - "uuid", - "workbench", - "accessRights", - "ui", - } + exclude = set( + [ + "creationDate", + "lastChangeDate", + "prjOwner", + "uuid", + "workbench", + "accessRights", + "ui", + ] + ) for key in expected.keys(): if key not in exclude: assert got[key] == expected[key], "Failed in %s" % key diff --git a/services/web/server/tests/unit/with_dbs/03/meta_modeling/conftest.py b/services/web/server/tests/unit/with_dbs/03/meta_modeling/conftest.py index 8c771873184..03ee7a9ef10 100644 --- a/services/web/server/tests/unit/with_dbs/03/meta_modeling/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/meta_modeling/conftest.py @@ -4,11 +4,11 @@ import logging from copy import deepcopy -from typing import Any +from typing import Any, Dict import pytest from simcore_postgres_database.models.users import UserRole -from simcore_service_webserver.logs import setup_logging +from simcore_service_webserver.log import setup_logging # HELPERS @@ -23,7 +23,7 @@ def user_role() -> UserRole: @pytest.fixture -def app_cfg(default_app_cfg, unused_tcp_port_factory, monkeypatch) -> dict[str, Any]: +def app_cfg(default_app_cfg, unused_tcp_port_factory, monkeypatch) -> Dict[str, Any]: """App's configuration used for every test in this module NOTE: Overrides services/web/server/tests/unit/with_dbs/conftest.py::app_cfg to influence app setup diff --git a/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py b/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py index 235d370d31c..dd602e52839 100644 --- a/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/version_control/conftest.py @@ -5,7 +5,7 @@ import logging from copy import deepcopy from pathlib import Path -from typing import Any, AsyncIterator, Awaitable, Callable +from typing import Any, AsyncIterator, Awaitable, Callable, Dict from unittest import mock from uuid 
import UUID @@ -26,11 +26,10 @@ from simcore_service_webserver._meta import API_VTAG as VX from simcore_service_webserver.db import APP_DB_ENGINE_KEY from simcore_service_webserver.db_models import UserRole -from simcore_service_webserver.logs import setup_logging -from tenacity._asyncio import AsyncRetrying -from tenacity.stop import stop_after_delay +from simcore_service_webserver.log import setup_logging +from tenacity import AsyncRetrying, stop_after_delay -ProjectDict = dict[str, Any] +ProjectDict = Dict[str, Any] # HELPERS @@ -78,7 +77,7 @@ async def mocked_get_services_for_user(*args, **kwargs): @pytest.fixture def app_cfg( default_app_cfg, unused_tcp_port_factory, catalog_subsystem_mock, monkeypatch -) -> dict[str, Any]: +) -> Dict[str, Any]: """App's configuration used for every test in this module NOTE: Overrides services/web/server/tests/unit/with_dbs/conftest.py::app_cfg to influence app setup